##// END OF EJS Templates
debugindex: move to a flexible column...
marmoute -
r50148:a3213042 default
parent child Browse files
Show More
@@ -1,5032 +1,5034 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revlogutils,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
107 constants as revlog_constants,
108 debug as revlog_debug,
108 debug as revlog_debug,
109 deltas as deltautil,
109 deltas as deltautil,
110 nodemap,
110 nodemap,
111 rewrite,
111 rewrite,
112 sidedata,
112 sidedata,
113 )
113 )
114
114
115 release = lockmod.release
115 release = lockmod.release
116
116
117 table = {}
117 table = {}
118 table.update(strip.command._table)
118 table.update(strip.command._table)
119 command = registrar.command(table)
119 command = registrar.command(table)
120
120
121
121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling forms: (INDEX, REV1, REV2) opens a standalone revlog
    # file from the current directory; (REV1, REV2) uses the changelog
    # of the current repository.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
141
141
142
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158
158
159
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle file, parse its header, and replay its contents
    # onto the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166
166
167
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to build on top of existing history unless explicitly asked.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass only counts 'n' (node) events for progress totals)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # `at` is the id of the last node committed (-1 before the first);
        # `nodeids` maps DAG ids to commit node ids for backref resolution.
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create commits/tags/branch switches
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file from
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this revision's id onto its designated line
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the second parent's per-rev files so
                        # the merge does not delete them
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content collected above
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve DAG parent backrefs into commit node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
353
353
354
354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump a changegroup: node ids only, or full delta info with ``all``."""
    pad = b' ' * indent
    if not all:
        # terse mode: one changelog node hash per line
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (pad, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
    )

    def showchunks(named):
        # print one section header followed by every delta in this group
        ui.write(b"\n%s%s\n" % (pad, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            fields = (
                pad,
                hex(node),
                hex(p1),
                hex(p2),
                hex(cs),
                hex(deltabase),
                len(delta),
            )
            ui.write(b"%s%s %s %s %s %s %d\n" % fields)

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
394
394
395
395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report but do not abort on markers we cannot decode
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, m)
        fm.end()
418
418
419
419
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    # print heads grouped by phase, in phase order
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
428
428
429
429
def _quasirepr(thing):
    """Return a repr-like byte string with deterministic mapping order."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        # sort keys so output is stable regardless of insertion order
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
436
436
437
437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when requested
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        # known part payloads get a detailed, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
460
460
461
461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only print the bundlespec, not the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
484
484
485
485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query before printing the header so a failure prints nothing
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection
        peer.close()
505
505
506
506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the files info recorded in changelog sidedata, if any
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    for f in sorted(files.touched):
        # classify the change; the first matching category wins,
        # falling back to the generic "touched"
        for members, label in (
            (files.added, b"added"),
            (files.removed, b"removed"),
            (files.merged, b"merged"),
            (files.salvaged, b"salvaged"),
        ):
            if f in members:
                action = label
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        template = b"%-8s %2s: %s, %s;\n"
        ui.write(template % (action, copy_parent, f, copy_source))
556
556
557
557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    nerrors = 0
    # print every inconsistency, then abort once if any were found
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
571
571
572
572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585
585
586
586
def _debugdisplaycolor(ui):
    """print every known color/effect name, each rendered in itself."""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, also surface user-defined color/terminfo keys
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[len(b'color.'):]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603
603
604
604
def _debugdisplaystyle(ui):
    """List configured style labels and the effects each expands to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad labels to the widest one so the effect columns line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
618
618
619
619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
641
641
642
642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on an arbitrary revlog index file rather than the repo.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                # 'n' events carry (rev, [real parent revs]); nullrev (-1)
                # parents are dropped.
                yield b'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in revs:
                    # Label explicitly requested revisions as rN.
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an annotation event whenever the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
712
712
713
713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_selected = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        # With -c/-m/--dir the single positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729
729
730
730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
749
749
750
750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta)
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, the base is always the previous revision
            # (or the revision itself for a full snapshot).
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
991
991
992
992
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument: it is the revision and the revlog is selected
    # through -c/-m.  Two: the first one names the tracked file.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # Pass this command's own name so error messages do not claim to come
    # from debugdeltachain (previous code passed b'debugdeltachain').
    # Local renamed from `revlog` to `rl` to stop shadowing the module-level
    # `revlog` import.
    rl = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    p1 = rl.node(p1r)
    p2 = rl.node(p2r)
    btext = [rl.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = rl.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    # NOTE(review): the handle returned by _datafp() is never explicitly
    # closed here — presumably acceptable for a short-lived debug command,
    # but confirm whether it should be used as a context manager.
    fh = rl._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1050
1050
1051
1051
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True

    if opts.get('datesort'):

        def sort_key(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        sort_key = None  # default ordering: by filename

    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=sort_key)
    for filename, state, mode, size, mtime in entries:
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1139
1139
1140
1140
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # The ignore-pattern hash only exists in the dirstate-v2 tree metadata;
    # dirstate-v1 has no docket, so the command prints nothing there.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1155
1155
1156
1156
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote`
    peer can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual peer (which may be this very repository when the
        # default path points at itself)
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # simulate the remote side with a filtered view of the local
        # repository: everything outside `::remote_revs` is hidden through a
        # dedicated repoview filter registered on the fly
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # likewise, restrict the local side to `::local_revs` when requested
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is filled with audit information by the discovery run and with
    # the statistics computed below; it ends up in the formatter output
    data = {}
    if opts.get(b'old'):
        # legacy, tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to the heads of its ancestry closure
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern, sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # strict formats (e.g. json) must not be polluted by plain writes, so
        # buffer them and expose the text as an `output` field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every local revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    # revisions that head-exchange alone could not classify
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    # NOTE(review): `total-roundtrips`/`total-queries` are audit entries
    # presumably filled in by the discovery implementations — confirm.
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1411
1411
1412
1412
# 4 KiB buffer used when streaming data in debugdownload (read chunk size
# and output-file buffering)
_chunksize = 4 << 10
1414
1414
1415
1415
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The content is streamed in ``_chunksize`` pieces to the file named by
    ``--output`` when given, otherwise to the ui.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the response handle was previously leaked; always release it, even
        # when opening the output file or writing to it fails
        fh.close()
1438
1438
1439
1439
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions in deterministic (name-sorted) order
    loaded = sorted(extensions.extensions(ui), key=operator.itemgetter(0))
    for extname, extmod in loaded:
        isinternal = extensions.ismoduleinternal(extmod)

        # locate where the extension was loaded from, when that is knowable
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        else:
            extsource = None

        # bundled extensions carry a magic "testedwith" marker that must not
        # be shown to users
        if isinternal:
            exttestedwith = []
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                # known-good with this Mercurial version: no annotation
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # flag the newest version the extension claims to support
                fm.plain(b' (%s!)\n' % exttestedwith[-1])

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1501
1501
1502
1502
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses ``expr`` through the fileset pipeline (parsed -> analyzed ->
    optimized), optionally printing the tree after the stages selected with
    ``--show-stage``, then lists the candidate files matched by the
    resulting matcher.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the three translation stages applied to the parsed tree, in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages get their tree pretty-printed
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare --verbose form prints the parsed tree without a header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include everything in the working directory, even ignored and
        # unknown files
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1598
1598
1599
1599
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # writing a report is exclusive with consuming one or doing a dry run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # the corruption covered by issue6528 only exists in revlogv1 stores
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # all the heavy lifting lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1672
1672
1673
1673
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # First column is as wide as the longest variant name, but never
    # narrower than the b'format-variant' header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # byte strings pass through unchanged; everything else is
            # rendered as a yes/no flag in the plain output
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick highlight labels depending on whether repo, config and
        # Mercurial default agree with each other
        if repovalue != configvalue:
            suffix = b'mismatchconfig'
        elif repovalue != fv.default:
            suffix = b'mismatchdefault'
        else:
            suffix = b'uptodate'
        namelabel = b'formatvariant.name.' + suffix
        repolabel = b'formatvariant.repo.' + suffix

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        configlabel = (
            b'formatvariant.config.special'
            if fv.default != configvalue
            else b'formatvariant.config.default'
        )
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1744
1744
1745
1745
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yn(flag):
        # render a truth value as b'yes'/b'no'
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yn(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yn(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yn(util.checknlink(path)))
    # Case sensitivity is probed with a real temporary file; if the probe
    # file cannot be created we report '(unknown)' instead of failing.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yn(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1768
1768
1769
1769
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle(b'debug', **kwargs)

    # map the user-facing compression name onto the internal bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1816
1816
1817
1817
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)

    def find_ignored(nf):
        # Return (matched path, (file, lineno, rule)) for a normalized path,
        # or (None, None) when nothing matches. A path can be ignored either
        # directly or through one of its parent directories.
        if nf == b'.':
            return None, None
        if ignore(nf):
            return nf, repo.dirstate._ignorefileandline(nf)
        for d in pathutil.finddirs(nf):
            if ignore(d):
                return d, repo.dirstate._ignorefileandline(d)
        return None, None

    for f in m.files():
        nf = util.normpath(f)
        ignored, ignoredata = find_ignored(nf)
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1866
1866
1867
1867
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # filelog-style stores wrap the actual revlog in a `_revlog` attribute;
    # hand the debug helper the underlying revlog when present
    target = getattr(store, b'_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target,
        full_node=ui.debugflag,
    )
1887
1889
1888
1890
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in rlog:
        node = rlog.node(rev)
        p1, p2 = rlog.parents(node)
        # one edge per parent; the second parent is only drawn when real
        ui.write(b"\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write(b"}\n")
1907
1909
1908
1910
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index first (shortest() forces it to be loaded and used)
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # stats() only exists on the C/Rust index implementations
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1918
1920
1919
1921
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python interpreter details
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds embed the stdlib in the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # TLS/SNI support offered by the Python build
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version (base version and any local "+..." build suffix)
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # try importing the accelerated extensions the policy promises
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # compression engines: registered vs actually usable vs wire-capable
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'available' if util._re2 else b'missing'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the fallback default, so its absence gets a dedicated message
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # give extensions a chance to run their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2218
2220
2219
2221
2220 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2222 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2221 def debugknown(ui, repopath, *ids, **opts):
2223 def debugknown(ui, repopath, *ids, **opts):
2222 """test whether node ids are known to a repo
2224 """test whether node ids are known to a repo
2223
2225
2224 Every ID must be a full-length hex node id string. Returns a list of 0s
2226 Every ID must be a full-length hex node id string. Returns a list of 0s
2225 and 1s indicating unknown/known.
2227 and 1s indicating unknown/known.
2226 """
2228 """
2227 opts = pycompat.byteskwargs(opts)
2229 opts = pycompat.byteskwargs(opts)
2228 repo = hg.peer(ui, opts, repopath)
2230 repo = hg.peer(ui, opts, repopath)
2229 if not repo.capable(b'known'):
2231 if not repo.capable(b'known'):
2230 raise error.Abort(b"known() not supported by target repository")
2232 raise error.Abort(b"known() not supported by target repository")
2231 flags = repo.known([bin(s) for s in ids])
2233 flags = repo.known([bin(s) for s in ids])
2232 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2234 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2233
2235
2234
2236
2235 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2237 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2236 def debuglabelcomplete(ui, repo, *args):
2238 def debuglabelcomplete(ui, repo, *args):
2237 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2239 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2238 debugnamecomplete(ui, repo, *args)
2240 debugnamecomplete(ui, repo, *args)
2239
2241
2240
2242
2241 @command(
2243 @command(
2242 b'debuglocks',
2244 b'debuglocks',
2243 [
2245 [
2244 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2246 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2245 (
2247 (
2246 b'W',
2248 b'W',
2247 b'force-free-wlock',
2249 b'force-free-wlock',
2248 None,
2250 None,
2249 _(b'free the working state lock (DANGEROUS)'),
2251 _(b'free the working state lock (DANGEROUS)'),
2250 ),
2252 ),
2251 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2253 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2252 (
2254 (
2253 b'S',
2255 b'S',
2254 b'set-wlock',
2256 b'set-wlock',
2255 None,
2257 None,
2256 _(b'set the working state lock until stopped'),
2258 _(b'set the working state lock until stopped'),
2257 ),
2259 ),
2258 ],
2260 ],
2259 _(b'[OPTION]...'),
2261 _(b'[OPTION]...'),
2260 )
2262 )
2261 def debuglocks(ui, repo, **opts):
2263 def debuglocks(ui, repo, **opts):
2262 """show or modify state of locks
2264 """show or modify state of locks
2263
2265
2264 By default, this command will show which locks are held. This
2266 By default, this command will show which locks are held. This
2265 includes the user and process holding the lock, the amount of time
2267 includes the user and process holding the lock, the amount of time
2266 the lock has been held, and the machine name where the process is
2268 the lock has been held, and the machine name where the process is
2267 running if it's not local.
2269 running if it's not local.
2268
2270
2269 Locks protect the integrity of Mercurial's data, so should be
2271 Locks protect the integrity of Mercurial's data, so should be
2270 treated with care. System crashes or other interruptions may cause
2272 treated with care. System crashes or other interruptions may cause
2271 locks to not be properly released, though Mercurial will usually
2273 locks to not be properly released, though Mercurial will usually
2272 detect and remove such stale locks automatically.
2274 detect and remove such stale locks automatically.
2273
2275
2274 However, detecting stale locks may not always be possible (for
2276 However, detecting stale locks may not always be possible (for
2275 instance, on a shared filesystem). Removing locks may also be
2277 instance, on a shared filesystem). Removing locks may also be
2276 blocked by filesystem permissions.
2278 blocked by filesystem permissions.
2277
2279
2278 Setting a lock will prevent other commands from changing the data.
2280 Setting a lock will prevent other commands from changing the data.
2279 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2281 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2280 The set locks are removed when the command exits.
2282 The set locks are removed when the command exits.
2281
2283
2282 Returns 0 if no locks are held.
2284 Returns 0 if no locks are held.
2283
2285
2284 """
2286 """
2285
2287
2286 if opts.get('force_free_lock'):
2288 if opts.get('force_free_lock'):
2287 repo.svfs.tryunlink(b'lock')
2289 repo.svfs.tryunlink(b'lock')
2288 if opts.get('force_free_wlock'):
2290 if opts.get('force_free_wlock'):
2289 repo.vfs.tryunlink(b'wlock')
2291 repo.vfs.tryunlink(b'wlock')
2290 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2292 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2291 return 0
2293 return 0
2292
2294
2293 locks = []
2295 locks = []
2294 try:
2296 try:
2295 if opts.get('set_wlock'):
2297 if opts.get('set_wlock'):
2296 try:
2298 try:
2297 locks.append(repo.wlock(False))
2299 locks.append(repo.wlock(False))
2298 except error.LockHeld:
2300 except error.LockHeld:
2299 raise error.Abort(_(b'wlock is already held'))
2301 raise error.Abort(_(b'wlock is already held'))
2300 if opts.get('set_lock'):
2302 if opts.get('set_lock'):
2301 try:
2303 try:
2302 locks.append(repo.lock(False))
2304 locks.append(repo.lock(False))
2303 except error.LockHeld:
2305 except error.LockHeld:
2304 raise error.Abort(_(b'lock is already held'))
2306 raise error.Abort(_(b'lock is already held'))
2305 if len(locks):
2307 if len(locks):
2306 try:
2308 try:
2307 if ui.interactive():
2309 if ui.interactive():
2308 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2310 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2309 ui.promptchoice(prompt)
2311 ui.promptchoice(prompt)
2310 else:
2312 else:
2311 msg = b"%d locks held, waiting for signal\n"
2313 msg = b"%d locks held, waiting for signal\n"
2312 msg %= len(locks)
2314 msg %= len(locks)
2313 ui.status(msg)
2315 ui.status(msg)
2314 while True: # XXX wait for a signal
2316 while True: # XXX wait for a signal
2315 time.sleep(0.1)
2317 time.sleep(0.1)
2316 except KeyboardInterrupt:
2318 except KeyboardInterrupt:
2317 msg = b"signal-received releasing locks\n"
2319 msg = b"signal-received releasing locks\n"
2318 ui.status(msg)
2320 ui.status(msg)
2319 return 0
2321 return 0
2320 finally:
2322 finally:
2321 release(*locks)
2323 release(*locks)
2322
2324
2323 now = time.time()
2325 now = time.time()
2324 held = 0
2326 held = 0
2325
2327
2326 def report(vfs, name, method):
2328 def report(vfs, name, method):
2327 # this causes stale locks to get reaped for more accurate reporting
2329 # this causes stale locks to get reaped for more accurate reporting
2328 try:
2330 try:
2329 l = method(False)
2331 l = method(False)
2330 except error.LockHeld:
2332 except error.LockHeld:
2331 l = None
2333 l = None
2332
2334
2333 if l:
2335 if l:
2334 l.release()
2336 l.release()
2335 else:
2337 else:
2336 try:
2338 try:
2337 st = vfs.lstat(name)
2339 st = vfs.lstat(name)
2338 age = now - st[stat.ST_MTIME]
2340 age = now - st[stat.ST_MTIME]
2339 user = util.username(st.st_uid)
2341 user = util.username(st.st_uid)
2340 locker = vfs.readlock(name)
2342 locker = vfs.readlock(name)
2341 if b":" in locker:
2343 if b":" in locker:
2342 host, pid = locker.split(b':')
2344 host, pid = locker.split(b':')
2343 if host == socket.gethostname():
2345 if host == socket.gethostname():
2344 locker = b'user %s, process %s' % (user or b'None', pid)
2346 locker = b'user %s, process %s' % (user or b'None', pid)
2345 else:
2347 else:
2346 locker = b'user %s, process %s, host %s' % (
2348 locker = b'user %s, process %s, host %s' % (
2347 user or b'None',
2349 user or b'None',
2348 pid,
2350 pid,
2349 host,
2351 host,
2350 )
2352 )
2351 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2353 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2352 return 1
2354 return 1
2353 except OSError as e:
2355 except OSError as e:
2354 if e.errno != errno.ENOENT:
2356 if e.errno != errno.ENOENT:
2355 raise
2357 raise
2356
2358
2357 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2359 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2358 return 0
2360 return 0
2359
2361
2360 held += report(repo.svfs, b"lock", repo.lock)
2362 held += report(repo.svfs, b"lock", repo.lock)
2361 held += report(repo.vfs, b"wlock", repo.wlock)
2363 held += report(repo.vfs, b"wlock", repo.wlock)
2362
2364
2363 return held
2365 return held
2364
2366
2365
2367
2366 @command(
2368 @command(
2367 b'debugmanifestfulltextcache',
2369 b'debugmanifestfulltextcache',
2368 [
2370 [
2369 (b'', b'clear', False, _(b'clear the cache')),
2371 (b'', b'clear', False, _(b'clear the cache')),
2370 (
2372 (
2371 b'a',
2373 b'a',
2372 b'add',
2374 b'add',
2373 [],
2375 [],
2374 _(b'add the given manifest nodes to the cache'),
2376 _(b'add the given manifest nodes to the cache'),
2375 _(b'NODE'),
2377 _(b'NODE'),
2376 ),
2378 ),
2377 ],
2379 ],
2378 b'',
2380 b'',
2379 )
2381 )
2380 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2382 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2381 """show, clear or amend the contents of the manifest fulltext cache"""
2383 """show, clear or amend the contents of the manifest fulltext cache"""
2382
2384
2383 def getcache():
2385 def getcache():
2384 r = repo.manifestlog.getstorage(b'')
2386 r = repo.manifestlog.getstorage(b'')
2385 try:
2387 try:
2386 return r._fulltextcache
2388 return r._fulltextcache
2387 except AttributeError:
2389 except AttributeError:
2388 msg = _(
2390 msg = _(
2389 b"Current revlog implementation doesn't appear to have a "
2391 b"Current revlog implementation doesn't appear to have a "
2390 b"manifest fulltext cache\n"
2392 b"manifest fulltext cache\n"
2391 )
2393 )
2392 raise error.Abort(msg)
2394 raise error.Abort(msg)
2393
2395
2394 if opts.get('clear'):
2396 if opts.get('clear'):
2395 with repo.wlock():
2397 with repo.wlock():
2396 cache = getcache()
2398 cache = getcache()
2397 cache.clear(clear_persisted_data=True)
2399 cache.clear(clear_persisted_data=True)
2398 return
2400 return
2399
2401
2400 if add:
2402 if add:
2401 with repo.wlock():
2403 with repo.wlock():
2402 m = repo.manifestlog
2404 m = repo.manifestlog
2403 store = m.getstorage(b'')
2405 store = m.getstorage(b'')
2404 for n in add:
2406 for n in add:
2405 try:
2407 try:
2406 manifest = m[store.lookup(n)]
2408 manifest = m[store.lookup(n)]
2407 except error.LookupError as e:
2409 except error.LookupError as e:
2408 raise error.Abort(
2410 raise error.Abort(
2409 bytes(e), hint=b"Check your manifest node id"
2411 bytes(e), hint=b"Check your manifest node id"
2410 )
2412 )
2411 manifest.read() # stores revisision in cache too
2413 manifest.read() # stores revisision in cache too
2412 return
2414 return
2413
2415
2414 cache = getcache()
2416 cache = getcache()
2415 if not len(cache):
2417 if not len(cache):
2416 ui.write(_(b'cache empty\n'))
2418 ui.write(_(b'cache empty\n'))
2417 else:
2419 else:
2418 ui.write(
2420 ui.write(
2419 _(
2421 _(
2420 b'cache contains %d manifest entries, in order of most to '
2422 b'cache contains %d manifest entries, in order of most to '
2421 b'least recent:\n'
2423 b'least recent:\n'
2422 )
2424 )
2423 % (len(cache),)
2425 % (len(cache),)
2424 )
2426 )
2425 totalsize = 0
2427 totalsize = 0
2426 for nodeid in cache:
2428 for nodeid in cache:
2427 # Use cache.get to not update the LRU order
2429 # Use cache.get to not update the LRU order
2428 data = cache.peek(nodeid)
2430 data = cache.peek(nodeid)
2429 size = len(data)
2431 size = len(data)
2430 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2432 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2431 ui.write(
2433 ui.write(
2432 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2434 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2433 )
2435 )
2434 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2436 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2435 ui.write(
2437 ui.write(
2436 _(b'total cache data size %s, on-disk %s\n')
2438 _(b'total cache data size %s, on-disk %s\n')
2437 % (util.bytecount(totalsize), util.bytecount(ondisk))
2439 % (util.bytecount(totalsize), util.bytecount(ondisk))
2438 )
2440 )
2439
2441
2440
2442
2441 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2443 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2442 def debugmergestate(ui, repo, *args, **opts):
2444 def debugmergestate(ui, repo, *args, **opts):
2443 """print merge state
2445 """print merge state
2444
2446
2445 Use --verbose to print out information about whether v1 or v2 merge state
2447 Use --verbose to print out information about whether v1 or v2 merge state
2446 was chosen."""
2448 was chosen."""
2447
2449
2448 if ui.verbose:
2450 if ui.verbose:
2449 ms = mergestatemod.mergestate(repo)
2451 ms = mergestatemod.mergestate(repo)
2450
2452
2451 # sort so that reasonable information is on top
2453 # sort so that reasonable information is on top
2452 v1records = ms._readrecordsv1()
2454 v1records = ms._readrecordsv1()
2453 v2records = ms._readrecordsv2()
2455 v2records = ms._readrecordsv2()
2454
2456
2455 if not v1records and not v2records:
2457 if not v1records and not v2records:
2456 pass
2458 pass
2457 elif not v2records:
2459 elif not v2records:
2458 ui.writenoi18n(b'no version 2 merge state\n')
2460 ui.writenoi18n(b'no version 2 merge state\n')
2459 elif ms._v1v2match(v1records, v2records):
2461 elif ms._v1v2match(v1records, v2records):
2460 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2462 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2461 else:
2463 else:
2462 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2464 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2463
2465
2464 opts = pycompat.byteskwargs(opts)
2466 opts = pycompat.byteskwargs(opts)
2465 if not opts[b'template']:
2467 if not opts[b'template']:
2466 opts[b'template'] = (
2468 opts[b'template'] = (
2467 b'{if(commits, "", "no merge state found\n")}'
2469 b'{if(commits, "", "no merge state found\n")}'
2468 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2470 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2469 b'{files % "file: {path} (state \\"{state}\\")\n'
2471 b'{files % "file: {path} (state \\"{state}\\")\n'
2470 b'{if(local_path, "'
2472 b'{if(local_path, "'
2471 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2473 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2472 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2474 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2473 b' other path: {other_path} (node {other_node})\n'
2475 b' other path: {other_path} (node {other_node})\n'
2474 b'")}'
2476 b'")}'
2475 b'{if(rename_side, "'
2477 b'{if(rename_side, "'
2476 b' rename side: {rename_side}\n'
2478 b' rename side: {rename_side}\n'
2477 b' renamed path: {renamed_path}\n'
2479 b' renamed path: {renamed_path}\n'
2478 b'")}'
2480 b'")}'
2479 b'{extras % " extra: {key} = {value}\n"}'
2481 b'{extras % " extra: {key} = {value}\n"}'
2480 b'"}'
2482 b'"}'
2481 b'{extras % "extra: {file} ({key} = {value})\n"}'
2483 b'{extras % "extra: {file} ({key} = {value})\n"}'
2482 )
2484 )
2483
2485
2484 ms = mergestatemod.mergestate.read(repo)
2486 ms = mergestatemod.mergestate.read(repo)
2485
2487
2486 fm = ui.formatter(b'debugmergestate', opts)
2488 fm = ui.formatter(b'debugmergestate', opts)
2487 fm.startitem()
2489 fm.startitem()
2488
2490
2489 fm_commits = fm.nested(b'commits')
2491 fm_commits = fm.nested(b'commits')
2490 if ms.active():
2492 if ms.active():
2491 for name, node, label_index in (
2493 for name, node, label_index in (
2492 (b'local', ms.local, 0),
2494 (b'local', ms.local, 0),
2493 (b'other', ms.other, 1),
2495 (b'other', ms.other, 1),
2494 ):
2496 ):
2495 fm_commits.startitem()
2497 fm_commits.startitem()
2496 fm_commits.data(name=name)
2498 fm_commits.data(name=name)
2497 fm_commits.data(node=hex(node))
2499 fm_commits.data(node=hex(node))
2498 if ms._labels and len(ms._labels) > label_index:
2500 if ms._labels and len(ms._labels) > label_index:
2499 fm_commits.data(label=ms._labels[label_index])
2501 fm_commits.data(label=ms._labels[label_index])
2500 fm_commits.end()
2502 fm_commits.end()
2501
2503
2502 fm_files = fm.nested(b'files')
2504 fm_files = fm.nested(b'files')
2503 if ms.active():
2505 if ms.active():
2504 for f in ms:
2506 for f in ms:
2505 fm_files.startitem()
2507 fm_files.startitem()
2506 fm_files.data(path=f)
2508 fm_files.data(path=f)
2507 state = ms._state[f]
2509 state = ms._state[f]
2508 fm_files.data(state=state[0])
2510 fm_files.data(state=state[0])
2509 if state[0] in (
2511 if state[0] in (
2510 mergestatemod.MERGE_RECORD_UNRESOLVED,
2512 mergestatemod.MERGE_RECORD_UNRESOLVED,
2511 mergestatemod.MERGE_RECORD_RESOLVED,
2513 mergestatemod.MERGE_RECORD_RESOLVED,
2512 ):
2514 ):
2513 fm_files.data(local_key=state[1])
2515 fm_files.data(local_key=state[1])
2514 fm_files.data(local_path=state[2])
2516 fm_files.data(local_path=state[2])
2515 fm_files.data(ancestor_path=state[3])
2517 fm_files.data(ancestor_path=state[3])
2516 fm_files.data(ancestor_node=state[4])
2518 fm_files.data(ancestor_node=state[4])
2517 fm_files.data(other_path=state[5])
2519 fm_files.data(other_path=state[5])
2518 fm_files.data(other_node=state[6])
2520 fm_files.data(other_node=state[6])
2519 fm_files.data(local_flags=state[7])
2521 fm_files.data(local_flags=state[7])
2520 elif state[0] in (
2522 elif state[0] in (
2521 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2523 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2522 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2524 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2523 ):
2525 ):
2524 fm_files.data(renamed_path=state[1])
2526 fm_files.data(renamed_path=state[1])
2525 fm_files.data(rename_side=state[2])
2527 fm_files.data(rename_side=state[2])
2526 fm_extras = fm_files.nested(b'extras')
2528 fm_extras = fm_files.nested(b'extras')
2527 for k, v in sorted(ms.extras(f).items()):
2529 for k, v in sorted(ms.extras(f).items()):
2528 fm_extras.startitem()
2530 fm_extras.startitem()
2529 fm_extras.data(key=k)
2531 fm_extras.data(key=k)
2530 fm_extras.data(value=v)
2532 fm_extras.data(value=v)
2531 fm_extras.end()
2533 fm_extras.end()
2532
2534
2533 fm_files.end()
2535 fm_files.end()
2534
2536
2535 fm_extras = fm.nested(b'extras')
2537 fm_extras = fm.nested(b'extras')
2536 for f, d in sorted(ms.allextras().items()):
2538 for f, d in sorted(ms.allextras().items()):
2537 if f in ms:
2539 if f in ms:
2538 # If file is in mergestate, we have already processed it's extras
2540 # If file is in mergestate, we have already processed it's extras
2539 continue
2541 continue
2540 for k, v in d.items():
2542 for k, v in d.items():
2541 fm_extras.startitem()
2543 fm_extras.startitem()
2542 fm_extras.data(file=f)
2544 fm_extras.data(file=f)
2543 fm_extras.data(key=k)
2545 fm_extras.data(key=k)
2544 fm_extras.data(value=v)
2546 fm_extras.data(value=v)
2545 fm_extras.end()
2547 fm_extras.end()
2546
2548
2547 fm.end()
2549 fm.end()
2548
2550
2549
2551
2550 @command(b'debugnamecomplete', [], _(b'NAME...'))
2552 @command(b'debugnamecomplete', [], _(b'NAME...'))
2551 def debugnamecomplete(ui, repo, *args):
2553 def debugnamecomplete(ui, repo, *args):
2552 '''complete "names" - tags, open branch names, bookmark names'''
2554 '''complete "names" - tags, open branch names, bookmark names'''
2553
2555
2554 names = set()
2556 names = set()
2555 # since we previously only listed open branches, we will handle that
2557 # since we previously only listed open branches, we will handle that
2556 # specially (after this for loop)
2558 # specially (after this for loop)
2557 for name, ns in repo.names.items():
2559 for name, ns in repo.names.items():
2558 if name != b'branches':
2560 if name != b'branches':
2559 names.update(ns.listnames(repo))
2561 names.update(ns.listnames(repo))
2560 names.update(
2562 names.update(
2561 tag
2563 tag
2562 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2564 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2563 if not closed
2565 if not closed
2564 )
2566 )
2565 completions = set()
2567 completions = set()
2566 if not args:
2568 if not args:
2567 args = [b'']
2569 args = [b'']
2568 for a in args:
2570 for a in args:
2569 completions.update(n for n in names if n.startswith(a))
2571 completions.update(n for n in names if n.startswith(a))
2570 ui.write(b'\n'.join(sorted(completions)))
2572 ui.write(b'\n'.join(sorted(completions)))
2571 ui.write(b'\n')
2573 ui.write(b'\n')
2572
2574
2573
2575
2574 @command(
2576 @command(
2575 b'debugnodemap',
2577 b'debugnodemap',
2576 [
2578 [
2577 (
2579 (
2578 b'',
2580 b'',
2579 b'dump-new',
2581 b'dump-new',
2580 False,
2582 False,
2581 _(b'write a (new) persistent binary nodemap on stdout'),
2583 _(b'write a (new) persistent binary nodemap on stdout'),
2582 ),
2584 ),
2583 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2585 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2584 (
2586 (
2585 b'',
2587 b'',
2586 b'check',
2588 b'check',
2587 False,
2589 False,
2588 _(b'check that the data on disk data are correct.'),
2590 _(b'check that the data on disk data are correct.'),
2589 ),
2591 ),
2590 (
2592 (
2591 b'',
2593 b'',
2592 b'metadata',
2594 b'metadata',
2593 False,
2595 False,
2594 _(b'display the on disk meta data for the nodemap'),
2596 _(b'display the on disk meta data for the nodemap'),
2595 ),
2597 ),
2596 ],
2598 ],
2597 )
2599 )
2598 def debugnodemap(ui, repo, **opts):
2600 def debugnodemap(ui, repo, **opts):
2599 """write and inspect on disk nodemap"""
2601 """write and inspect on disk nodemap"""
2600 if opts['dump_new']:
2602 if opts['dump_new']:
2601 unfi = repo.unfiltered()
2603 unfi = repo.unfiltered()
2602 cl = unfi.changelog
2604 cl = unfi.changelog
2603 if util.safehasattr(cl.index, "nodemap_data_all"):
2605 if util.safehasattr(cl.index, "nodemap_data_all"):
2604 data = cl.index.nodemap_data_all()
2606 data = cl.index.nodemap_data_all()
2605 else:
2607 else:
2606 data = nodemap.persistent_data(cl.index)
2608 data = nodemap.persistent_data(cl.index)
2607 ui.write(data)
2609 ui.write(data)
2608 elif opts['dump_disk']:
2610 elif opts['dump_disk']:
2609 unfi = repo.unfiltered()
2611 unfi = repo.unfiltered()
2610 cl = unfi.changelog
2612 cl = unfi.changelog
2611 nm_data = nodemap.persisted_data(cl)
2613 nm_data = nodemap.persisted_data(cl)
2612 if nm_data is not None:
2614 if nm_data is not None:
2613 docket, data = nm_data
2615 docket, data = nm_data
2614 ui.write(data[:])
2616 ui.write(data[:])
2615 elif opts['check']:
2617 elif opts['check']:
2616 unfi = repo.unfiltered()
2618 unfi = repo.unfiltered()
2617 cl = unfi.changelog
2619 cl = unfi.changelog
2618 nm_data = nodemap.persisted_data(cl)
2620 nm_data = nodemap.persisted_data(cl)
2619 if nm_data is not None:
2621 if nm_data is not None:
2620 docket, data = nm_data
2622 docket, data = nm_data
2621 return nodemap.check_data(ui, cl.index, data)
2623 return nodemap.check_data(ui, cl.index, data)
2622 elif opts['metadata']:
2624 elif opts['metadata']:
2623 unfi = repo.unfiltered()
2625 unfi = repo.unfiltered()
2624 cl = unfi.changelog
2626 cl = unfi.changelog
2625 nm_data = nodemap.persisted_data(cl)
2627 nm_data = nodemap.persisted_data(cl)
2626 if nm_data is not None:
2628 if nm_data is not None:
2627 docket, data = nm_data
2629 docket, data = nm_data
2628 ui.write((b"uid: %s\n") % docket.uid)
2630 ui.write((b"uid: %s\n") % docket.uid)
2629 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2631 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2630 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2632 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2631 ui.write((b"data-length: %d\n") % docket.data_length)
2633 ui.write((b"data-length: %d\n") % docket.data_length)
2632 ui.write((b"data-unused: %d\n") % docket.data_unused)
2634 ui.write((b"data-unused: %d\n") % docket.data_unused)
2633 unused_perc = docket.data_unused * 100.0 / docket.data_length
2635 unused_perc = docket.data_unused * 100.0 / docket.data_length
2634 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2636 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2635
2637
2636
2638
2637 @command(
2639 @command(
2638 b'debugobsolete',
2640 b'debugobsolete',
2639 [
2641 [
2640 (b'', b'flags', 0, _(b'markers flag')),
2642 (b'', b'flags', 0, _(b'markers flag')),
2641 (
2643 (
2642 b'',
2644 b'',
2643 b'record-parents',
2645 b'record-parents',
2644 False,
2646 False,
2645 _(b'record parent information for the precursor'),
2647 _(b'record parent information for the precursor'),
2646 ),
2648 ),
2647 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2649 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2648 (
2650 (
2649 b'',
2651 b'',
2650 b'exclusive',
2652 b'exclusive',
2651 False,
2653 False,
2652 _(b'restrict display to markers only relevant to REV'),
2654 _(b'restrict display to markers only relevant to REV'),
2653 ),
2655 ),
2654 (b'', b'index', False, _(b'display index of the marker')),
2656 (b'', b'index', False, _(b'display index of the marker')),
2655 (b'', b'delete', [], _(b'delete markers specified by indices')),
2657 (b'', b'delete', [], _(b'delete markers specified by indices')),
2656 ]
2658 ]
2657 + cmdutil.commitopts2
2659 + cmdutil.commitopts2
2658 + cmdutil.formatteropts,
2660 + cmdutil.formatteropts,
2659 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2661 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2660 )
2662 )
2661 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2663 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2662 """create arbitrary obsolete marker
2664 """create arbitrary obsolete marker
2663
2665
2664 With no arguments, displays the list of obsolescence markers."""
2666 With no arguments, displays the list of obsolescence markers."""
2665
2667
2666 opts = pycompat.byteskwargs(opts)
2668 opts = pycompat.byteskwargs(opts)
2667
2669
2668 def parsenodeid(s):
2670 def parsenodeid(s):
2669 try:
2671 try:
2670 # We do not use revsingle/revrange functions here to accept
2672 # We do not use revsingle/revrange functions here to accept
2671 # arbitrary node identifiers, possibly not present in the
2673 # arbitrary node identifiers, possibly not present in the
2672 # local repository.
2674 # local repository.
2673 n = bin(s)
2675 n = bin(s)
2674 if len(n) != repo.nodeconstants.nodelen:
2676 if len(n) != repo.nodeconstants.nodelen:
2675 raise ValueError
2677 raise ValueError
2676 return n
2678 return n
2677 except ValueError:
2679 except ValueError:
2678 raise error.InputError(
2680 raise error.InputError(
2679 b'changeset references must be full hexadecimal '
2681 b'changeset references must be full hexadecimal '
2680 b'node identifiers'
2682 b'node identifiers'
2681 )
2683 )
2682
2684
2683 if opts.get(b'delete'):
2685 if opts.get(b'delete'):
2684 indices = []
2686 indices = []
2685 for v in opts.get(b'delete'):
2687 for v in opts.get(b'delete'):
2686 try:
2688 try:
2687 indices.append(int(v))
2689 indices.append(int(v))
2688 except ValueError:
2690 except ValueError:
2689 raise error.InputError(
2691 raise error.InputError(
2690 _(b'invalid index value: %r') % v,
2692 _(b'invalid index value: %r') % v,
2691 hint=_(b'use integers for indices'),
2693 hint=_(b'use integers for indices'),
2692 )
2694 )
2693
2695
2694 if repo.currenttransaction():
2696 if repo.currenttransaction():
2695 raise error.Abort(
2697 raise error.Abort(
2696 _(b'cannot delete obsmarkers in the middle of transaction.')
2698 _(b'cannot delete obsmarkers in the middle of transaction.')
2697 )
2699 )
2698
2700
2699 with repo.lock():
2701 with repo.lock():
2700 n = repair.deleteobsmarkers(repo.obsstore, indices)
2702 n = repair.deleteobsmarkers(repo.obsstore, indices)
2701 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2703 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2702
2704
2703 return
2705 return
2704
2706
2705 if precursor is not None:
2707 if precursor is not None:
2706 if opts[b'rev']:
2708 if opts[b'rev']:
2707 raise error.InputError(
2709 raise error.InputError(
2708 b'cannot select revision when creating marker'
2710 b'cannot select revision when creating marker'
2709 )
2711 )
2710 metadata = {}
2712 metadata = {}
2711 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2713 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2712 succs = tuple(parsenodeid(succ) for succ in successors)
2714 succs = tuple(parsenodeid(succ) for succ in successors)
2713 l = repo.lock()
2715 l = repo.lock()
2714 try:
2716 try:
2715 tr = repo.transaction(b'debugobsolete')
2717 tr = repo.transaction(b'debugobsolete')
2716 try:
2718 try:
2717 date = opts.get(b'date')
2719 date = opts.get(b'date')
2718 if date:
2720 if date:
2719 date = dateutil.parsedate(date)
2721 date = dateutil.parsedate(date)
2720 else:
2722 else:
2721 date = None
2723 date = None
2722 prec = parsenodeid(precursor)
2724 prec = parsenodeid(precursor)
2723 parents = None
2725 parents = None
2724 if opts[b'record_parents']:
2726 if opts[b'record_parents']:
2725 if prec not in repo.unfiltered():
2727 if prec not in repo.unfiltered():
2726 raise error.Abort(
2728 raise error.Abort(
2727 b'cannot used --record-parents on '
2729 b'cannot used --record-parents on '
2728 b'unknown changesets'
2730 b'unknown changesets'
2729 )
2731 )
2730 parents = repo.unfiltered()[prec].parents()
2732 parents = repo.unfiltered()[prec].parents()
2731 parents = tuple(p.node() for p in parents)
2733 parents = tuple(p.node() for p in parents)
2732 repo.obsstore.create(
2734 repo.obsstore.create(
2733 tr,
2735 tr,
2734 prec,
2736 prec,
2735 succs,
2737 succs,
2736 opts[b'flags'],
2738 opts[b'flags'],
2737 parents=parents,
2739 parents=parents,
2738 date=date,
2740 date=date,
2739 metadata=metadata,
2741 metadata=metadata,
2740 ui=ui,
2742 ui=ui,
2741 )
2743 )
2742 tr.close()
2744 tr.close()
2743 except ValueError as exc:
2745 except ValueError as exc:
2744 raise error.Abort(
2746 raise error.Abort(
2745 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2747 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2746 )
2748 )
2747 finally:
2749 finally:
2748 tr.release()
2750 tr.release()
2749 finally:
2751 finally:
2750 l.release()
2752 l.release()
2751 else:
2753 else:
2752 if opts[b'rev']:
2754 if opts[b'rev']:
2753 revs = logcmdutil.revrange(repo, opts[b'rev'])
2755 revs = logcmdutil.revrange(repo, opts[b'rev'])
2754 nodes = [repo[r].node() for r in revs]
2756 nodes = [repo[r].node() for r in revs]
2755 markers = list(
2757 markers = list(
2756 obsutil.getmarkers(
2758 obsutil.getmarkers(
2757 repo, nodes=nodes, exclusive=opts[b'exclusive']
2759 repo, nodes=nodes, exclusive=opts[b'exclusive']
2758 )
2760 )
2759 )
2761 )
2760 markers.sort(key=lambda x: x._data)
2762 markers.sort(key=lambda x: x._data)
2761 else:
2763 else:
2762 markers = obsutil.getmarkers(repo)
2764 markers = obsutil.getmarkers(repo)
2763
2765
2764 markerstoiter = markers
2766 markerstoiter = markers
2765 isrelevant = lambda m: True
2767 isrelevant = lambda m: True
2766 if opts.get(b'rev') and opts.get(b'index'):
2768 if opts.get(b'rev') and opts.get(b'index'):
2767 markerstoiter = obsutil.getmarkers(repo)
2769 markerstoiter = obsutil.getmarkers(repo)
2768 markerset = set(markers)
2770 markerset = set(markers)
2769 isrelevant = lambda m: m in markerset
2771 isrelevant = lambda m: m in markerset
2770
2772
2771 fm = ui.formatter(b'debugobsolete', opts)
2773 fm = ui.formatter(b'debugobsolete', opts)
2772 for i, m in enumerate(markerstoiter):
2774 for i, m in enumerate(markerstoiter):
2773 if not isrelevant(m):
2775 if not isrelevant(m):
2774 # marker can be irrelevant when we're iterating over a set
2776 # marker can be irrelevant when we're iterating over a set
2775 # of markers (markerstoiter) which is bigger than the set
2777 # of markers (markerstoiter) which is bigger than the set
2776 # of markers we want to display (markers)
2778 # of markers we want to display (markers)
2777 # this can happen if both --index and --rev options are
2779 # this can happen if both --index and --rev options are
2778 # provided and thus we need to iterate over all of the markers
2780 # provided and thus we need to iterate over all of the markers
2779 # to get the correct indices, but only display the ones that
2781 # to get the correct indices, but only display the ones that
2780 # are relevant to --rev value
2782 # are relevant to --rev value
2781 continue
2783 continue
2782 fm.startitem()
2784 fm.startitem()
2783 ind = i if opts.get(b'index') else None
2785 ind = i if opts.get(b'index') else None
2784 cmdutil.showmarker(fm, m, index=ind)
2786 cmdutil.showmarker(fm, m, index=ind)
2785 fm.end()
2787 fm.end()
2786
2788
2787
2789
2788 @command(
2790 @command(
2789 b'debugp1copies',
2791 b'debugp1copies',
2790 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2792 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2791 _(b'[-r REV]'),
2793 _(b'[-r REV]'),
2792 )
2794 )
2793 def debugp1copies(ui, repo, **opts):
2795 def debugp1copies(ui, repo, **opts):
2794 """dump copy information compared to p1"""
2796 """dump copy information compared to p1"""
2795
2797
2796 opts = pycompat.byteskwargs(opts)
2798 opts = pycompat.byteskwargs(opts)
2797 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2799 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2798 for dst, src in ctx.p1copies().items():
2800 for dst, src in ctx.p1copies().items():
2799 ui.write(b'%s -> %s\n' % (src, dst))
2801 ui.write(b'%s -> %s\n' % (src, dst))
2800
2802
2801
2803
2802 @command(
2804 @command(
2803 b'debugp2copies',
2805 b'debugp2copies',
2804 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2806 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2805 _(b'[-r REV]'),
2807 _(b'[-r REV]'),
2806 )
2808 )
2807 def debugp1copies(ui, repo, **opts):
2809 def debugp1copies(ui, repo, **opts):
2808 """dump copy information compared to p2"""
2810 """dump copy information compared to p2"""
2809
2811
2810 opts = pycompat.byteskwargs(opts)
2812 opts = pycompat.byteskwargs(opts)
2811 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2813 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2812 for dst, src in ctx.p2copies().items():
2814 for dst, src in ctx.p2copies().items():
2813 ui.write(b'%s -> %s\n' % (src, dst))
2815 ui.write(b'%s -> %s\n' % (src, dst))
2814
2816
2815
2817
2816 @command(
2818 @command(
2817 b'debugpathcomplete',
2819 b'debugpathcomplete',
2818 [
2820 [
2819 (b'f', b'full', None, _(b'complete an entire path')),
2821 (b'f', b'full', None, _(b'complete an entire path')),
2820 (b'n', b'normal', None, _(b'show only normal files')),
2822 (b'n', b'normal', None, _(b'show only normal files')),
2821 (b'a', b'added', None, _(b'show only added files')),
2823 (b'a', b'added', None, _(b'show only added files')),
2822 (b'r', b'removed', None, _(b'show only removed files')),
2824 (b'r', b'removed', None, _(b'show only removed files')),
2823 ],
2825 ],
2824 _(b'FILESPEC...'),
2826 _(b'FILESPEC...'),
2825 )
2827 )
2826 def debugpathcomplete(ui, repo, *specs, **opts):
2828 def debugpathcomplete(ui, repo, *specs, **opts):
2827 """complete part or all of a tracked path
2829 """complete part or all of a tracked path
2828
2830
2829 This command supports shells that offer path name completion. It
2831 This command supports shells that offer path name completion. It
2830 currently completes only files already known to the dirstate.
2832 currently completes only files already known to the dirstate.
2831
2833
2832 Completion extends only to the next path segment unless
2834 Completion extends only to the next path segment unless
2833 --full is specified, in which case entire paths are used."""
2835 --full is specified, in which case entire paths are used."""
2834
2836
2835 def complete(path, acceptable):
2837 def complete(path, acceptable):
2836 dirstate = repo.dirstate
2838 dirstate = repo.dirstate
2837 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2839 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2838 rootdir = repo.root + pycompat.ossep
2840 rootdir = repo.root + pycompat.ossep
2839 if spec != repo.root and not spec.startswith(rootdir):
2841 if spec != repo.root and not spec.startswith(rootdir):
2840 return [], []
2842 return [], []
2841 if os.path.isdir(spec):
2843 if os.path.isdir(spec):
2842 spec += b'/'
2844 spec += b'/'
2843 spec = spec[len(rootdir) :]
2845 spec = spec[len(rootdir) :]
2844 fixpaths = pycompat.ossep != b'/'
2846 fixpaths = pycompat.ossep != b'/'
2845 if fixpaths:
2847 if fixpaths:
2846 spec = spec.replace(pycompat.ossep, b'/')
2848 spec = spec.replace(pycompat.ossep, b'/')
2847 speclen = len(spec)
2849 speclen = len(spec)
2848 fullpaths = opts['full']
2850 fullpaths = opts['full']
2849 files, dirs = set(), set()
2851 files, dirs = set(), set()
2850 adddir, addfile = dirs.add, files.add
2852 adddir, addfile = dirs.add, files.add
2851 for f, st in dirstate.items():
2853 for f, st in dirstate.items():
2852 if f.startswith(spec) and st.state in acceptable:
2854 if f.startswith(spec) and st.state in acceptable:
2853 if fixpaths:
2855 if fixpaths:
2854 f = f.replace(b'/', pycompat.ossep)
2856 f = f.replace(b'/', pycompat.ossep)
2855 if fullpaths:
2857 if fullpaths:
2856 addfile(f)
2858 addfile(f)
2857 continue
2859 continue
2858 s = f.find(pycompat.ossep, speclen)
2860 s = f.find(pycompat.ossep, speclen)
2859 if s >= 0:
2861 if s >= 0:
2860 adddir(f[:s])
2862 adddir(f[:s])
2861 else:
2863 else:
2862 addfile(f)
2864 addfile(f)
2863 return files, dirs
2865 return files, dirs
2864
2866
2865 acceptable = b''
2867 acceptable = b''
2866 if opts['normal']:
2868 if opts['normal']:
2867 acceptable += b'nm'
2869 acceptable += b'nm'
2868 if opts['added']:
2870 if opts['added']:
2869 acceptable += b'a'
2871 acceptable += b'a'
2870 if opts['removed']:
2872 if opts['removed']:
2871 acceptable += b'r'
2873 acceptable += b'r'
2872 cwd = repo.getcwd()
2874 cwd = repo.getcwd()
2873 if not specs:
2875 if not specs:
2874 specs = [b'.']
2876 specs = [b'.']
2875
2877
2876 files, dirs = set(), set()
2878 files, dirs = set(), set()
2877 for spec in specs:
2879 for spec in specs:
2878 f, d = complete(spec, acceptable or b'nmar')
2880 f, d = complete(spec, acceptable or b'nmar')
2879 files.update(f)
2881 files.update(f)
2880 dirs.update(d)
2882 dirs.update(d)
2881 files.update(dirs)
2883 files.update(dirs)
2882 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2884 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2883 ui.write(b'\n')
2885 ui.write(b'\n')
2884
2886
2885
2887
2886 @command(
2888 @command(
2887 b'debugpathcopies',
2889 b'debugpathcopies',
2888 cmdutil.walkopts,
2890 cmdutil.walkopts,
2889 b'hg debugpathcopies REV1 REV2 [FILE]',
2891 b'hg debugpathcopies REV1 REV2 [FILE]',
2890 inferrepo=True,
2892 inferrepo=True,
2891 )
2893 )
2892 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2894 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2893 """show copies between two revisions"""
2895 """show copies between two revisions"""
2894 ctx1 = scmutil.revsingle(repo, rev1)
2896 ctx1 = scmutil.revsingle(repo, rev1)
2895 ctx2 = scmutil.revsingle(repo, rev2)
2897 ctx2 = scmutil.revsingle(repo, rev2)
2896 m = scmutil.match(ctx1, pats, opts)
2898 m = scmutil.match(ctx1, pats, opts)
2897 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2899 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2898 ui.write(b'%s -> %s\n' % (src, dst))
2900 ui.write(b'%s -> %s\n' % (src, dst))
2899
2901
2900
2902
2901 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2903 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2902 def debugpeer(ui, path):
2904 def debugpeer(ui, path):
2903 """establish a connection to a peer repository"""
2905 """establish a connection to a peer repository"""
2904 # Always enable peer request logging. Requires --debug to display
2906 # Always enable peer request logging. Requires --debug to display
2905 # though.
2907 # though.
2906 overrides = {
2908 overrides = {
2907 (b'devel', b'debug.peer-request'): True,
2909 (b'devel', b'debug.peer-request'): True,
2908 }
2910 }
2909
2911
2910 with ui.configoverride(overrides):
2912 with ui.configoverride(overrides):
2911 peer = hg.peer(ui, {}, path)
2913 peer = hg.peer(ui, {}, path)
2912
2914
2913 try:
2915 try:
2914 local = peer.local() is not None
2916 local = peer.local() is not None
2915 canpush = peer.canpush()
2917 canpush = peer.canpush()
2916
2918
2917 ui.write(_(b'url: %s\n') % peer.url())
2919 ui.write(_(b'url: %s\n') % peer.url())
2918 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2920 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2919 ui.write(
2921 ui.write(
2920 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2922 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2921 )
2923 )
2922 finally:
2924 finally:
2923 peer.close()
2925 peer.close()
2924
2926
2925
2927
2926 @command(
2928 @command(
2927 b'debugpickmergetool',
2929 b'debugpickmergetool',
2928 [
2930 [
2929 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2931 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2930 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2932 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2931 ]
2933 ]
2932 + cmdutil.walkopts
2934 + cmdutil.walkopts
2933 + cmdutil.mergetoolopts,
2935 + cmdutil.mergetoolopts,
2934 _(b'[PATTERN]...'),
2936 _(b'[PATTERN]...'),
2935 inferrepo=True,
2937 inferrepo=True,
2936 )
2938 )
2937 def debugpickmergetool(ui, repo, *pats, **opts):
2939 def debugpickmergetool(ui, repo, *pats, **opts):
2938 """examine which merge tool is chosen for specified file
2940 """examine which merge tool is chosen for specified file
2939
2941
2940 As described in :hg:`help merge-tools`, Mercurial examines
2942 As described in :hg:`help merge-tools`, Mercurial examines
2941 configurations below in this order to decide which merge tool is
2943 configurations below in this order to decide which merge tool is
2942 chosen for specified file.
2944 chosen for specified file.
2943
2945
2944 1. ``--tool`` option
2946 1. ``--tool`` option
2945 2. ``HGMERGE`` environment variable
2947 2. ``HGMERGE`` environment variable
2946 3. configurations in ``merge-patterns`` section
2948 3. configurations in ``merge-patterns`` section
2947 4. configuration of ``ui.merge``
2949 4. configuration of ``ui.merge``
2948 5. configurations in ``merge-tools`` section
2950 5. configurations in ``merge-tools`` section
2949 6. ``hgmerge`` tool (for historical reason only)
2951 6. ``hgmerge`` tool (for historical reason only)
2950 7. default tool for fallback (``:merge`` or ``:prompt``)
2952 7. default tool for fallback (``:merge`` or ``:prompt``)
2951
2953
2952 This command writes out examination result in the style below::
2954 This command writes out examination result in the style below::
2953
2955
2954 FILE = MERGETOOL
2956 FILE = MERGETOOL
2955
2957
2956 By default, all files known in the first parent context of the
2958 By default, all files known in the first parent context of the
2957 working directory are examined. Use file patterns and/or -I/-X
2959 working directory are examined. Use file patterns and/or -I/-X
2958 options to limit target files. -r/--rev is also useful to examine
2960 options to limit target files. -r/--rev is also useful to examine
2959 files in another context without actual updating to it.
2961 files in another context without actual updating to it.
2960
2962
2961 With --debug, this command shows warning messages while matching
2963 With --debug, this command shows warning messages while matching
2962 against ``merge-patterns`` and so on, too. It is recommended to
2964 against ``merge-patterns`` and so on, too. It is recommended to
2963 use this option with explicit file patterns and/or -I/-X options,
2965 use this option with explicit file patterns and/or -I/-X options,
2964 because this option increases amount of output per file according
2966 because this option increases amount of output per file according
2965 to configurations in hgrc.
2967 to configurations in hgrc.
2966
2968
2967 With -v/--verbose, this command shows configurations below at
2969 With -v/--verbose, this command shows configurations below at
2968 first (only if specified).
2970 first (only if specified).
2969
2971
2970 - ``--tool`` option
2972 - ``--tool`` option
2971 - ``HGMERGE`` environment variable
2973 - ``HGMERGE`` environment variable
2972 - configuration of ``ui.merge``
2974 - configuration of ``ui.merge``
2973
2975
2974 If merge tool is chosen before matching against
2976 If merge tool is chosen before matching against
2975 ``merge-patterns``, this command can't show any helpful
2977 ``merge-patterns``, this command can't show any helpful
2976 information, even with --debug. In such case, information above is
2978 information, even with --debug. In such case, information above is
2977 useful to know why a merge tool is chosen.
2979 useful to know why a merge tool is chosen.
2978 """
2980 """
2979 opts = pycompat.byteskwargs(opts)
2981 opts = pycompat.byteskwargs(opts)
2980 overrides = {}
2982 overrides = {}
2981 if opts[b'tool']:
2983 if opts[b'tool']:
2982 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2984 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2983 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2985 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2984
2986
2985 with ui.configoverride(overrides, b'debugmergepatterns'):
2987 with ui.configoverride(overrides, b'debugmergepatterns'):
2986 hgmerge = encoding.environ.get(b"HGMERGE")
2988 hgmerge = encoding.environ.get(b"HGMERGE")
2987 if hgmerge is not None:
2989 if hgmerge is not None:
2988 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2990 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2989 uimerge = ui.config(b"ui", b"merge")
2991 uimerge = ui.config(b"ui", b"merge")
2990 if uimerge:
2992 if uimerge:
2991 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2993 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2992
2994
2993 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2995 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2994 m = scmutil.match(ctx, pats, opts)
2996 m = scmutil.match(ctx, pats, opts)
2995 changedelete = opts[b'changedelete']
2997 changedelete = opts[b'changedelete']
2996 for path in ctx.walk(m):
2998 for path in ctx.walk(m):
2997 fctx = ctx[path]
2999 fctx = ctx[path]
2998 with ui.silent(
3000 with ui.silent(
2999 error=True
3001 error=True
3000 ) if not ui.debugflag else util.nullcontextmanager():
3002 ) if not ui.debugflag else util.nullcontextmanager():
3001 tool, toolpath = filemerge._picktool(
3003 tool, toolpath = filemerge._picktool(
3002 repo,
3004 repo,
3003 ui,
3005 ui,
3004 path,
3006 path,
3005 fctx.isbinary(),
3007 fctx.isbinary(),
3006 b'l' in fctx.flags(),
3008 b'l' in fctx.flags(),
3007 changedelete,
3009 changedelete,
3008 )
3010 )
3009 ui.write(b'%s = %s\n' % (path, tool))
3011 ui.write(b'%s = %s\n' % (path, tool))
3010
3012
3011
3013
3012 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3014 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3013 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3015 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3014 """access the pushkey key/value protocol
3016 """access the pushkey key/value protocol
3015
3017
3016 With two args, list the keys in the given namespace.
3018 With two args, list the keys in the given namespace.
3017
3019
3018 With five args, set a key to new if it currently is set to old.
3020 With five args, set a key to new if it currently is set to old.
3019 Reports success or failure.
3021 Reports success or failure.
3020 """
3022 """
3021
3023
3022 target = hg.peer(ui, {}, repopath)
3024 target = hg.peer(ui, {}, repopath)
3023 try:
3025 try:
3024 if keyinfo:
3026 if keyinfo:
3025 key, old, new = keyinfo
3027 key, old, new = keyinfo
3026 with target.commandexecutor() as e:
3028 with target.commandexecutor() as e:
3027 r = e.callcommand(
3029 r = e.callcommand(
3028 b'pushkey',
3030 b'pushkey',
3029 {
3031 {
3030 b'namespace': namespace,
3032 b'namespace': namespace,
3031 b'key': key,
3033 b'key': key,
3032 b'old': old,
3034 b'old': old,
3033 b'new': new,
3035 b'new': new,
3034 },
3036 },
3035 ).result()
3037 ).result()
3036
3038
3037 ui.status(pycompat.bytestr(r) + b'\n')
3039 ui.status(pycompat.bytestr(r) + b'\n')
3038 return not r
3040 return not r
3039 else:
3041 else:
3040 for k, v in sorted(target.listkeys(namespace).items()):
3042 for k, v in sorted(target.listkeys(namespace).items()):
3041 ui.write(
3043 ui.write(
3042 b"%s\t%s\n"
3044 b"%s\t%s\n"
3043 % (stringutil.escapestr(k), stringutil.escapestr(v))
3045 % (stringutil.escapestr(k), stringutil.escapestr(v))
3044 )
3046 )
3045 finally:
3047 finally:
3046 target.close()
3048 target.close()
3047
3049
3048
3050
3049 @command(b'debugpvec', [], _(b'A B'))
3051 @command(b'debugpvec', [], _(b'A B'))
3050 def debugpvec(ui, repo, a, b=None):
3052 def debugpvec(ui, repo, a, b=None):
3051 ca = scmutil.revsingle(repo, a)
3053 ca = scmutil.revsingle(repo, a)
3052 cb = scmutil.revsingle(repo, b)
3054 cb = scmutil.revsingle(repo, b)
3053 pa = pvec.ctxpvec(ca)
3055 pa = pvec.ctxpvec(ca)
3054 pb = pvec.ctxpvec(cb)
3056 pb = pvec.ctxpvec(cb)
3055 if pa == pb:
3057 if pa == pb:
3056 rel = b"="
3058 rel = b"="
3057 elif pa > pb:
3059 elif pa > pb:
3058 rel = b">"
3060 rel = b">"
3059 elif pa < pb:
3061 elif pa < pb:
3060 rel = b"<"
3062 rel = b"<"
3061 elif pa | pb:
3063 elif pa | pb:
3062 rel = b"|"
3064 rel = b"|"
3063 ui.write(_(b"a: %s\n") % pa)
3065 ui.write(_(b"a: %s\n") % pa)
3064 ui.write(_(b"b: %s\n") % pb)
3066 ui.write(_(b"b: %s\n") % pb)
3065 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3067 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3066 ui.write(
3068 ui.write(
3067 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3069 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3068 % (
3070 % (
3069 abs(pa._depth - pb._depth),
3071 abs(pa._depth - pb._depth),
3070 pvec._hamming(pa._vec, pb._vec),
3072 pvec._hamming(pa._vec, pb._vec),
3071 pa.distance(pb),
3073 pa.distance(pb),
3072 rel,
3074 rel,
3073 )
3075 )
3074 )
3076 )
3075
3077
3076
3078
3077 @command(
3079 @command(
3078 b'debugrebuilddirstate|debugrebuildstate',
3080 b'debugrebuilddirstate|debugrebuildstate',
3079 [
3081 [
3080 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3082 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3081 (
3083 (
3082 b'',
3084 b'',
3083 b'minimal',
3085 b'minimal',
3084 None,
3086 None,
3085 _(
3087 _(
3086 b'only rebuild files that are inconsistent with '
3088 b'only rebuild files that are inconsistent with '
3087 b'the working copy parent'
3089 b'the working copy parent'
3088 ),
3090 ),
3089 ),
3091 ),
3090 ],
3092 ],
3091 _(b'[-r REV]'),
3093 _(b'[-r REV]'),
3092 )
3094 )
3093 def debugrebuilddirstate(ui, repo, rev, **opts):
3095 def debugrebuilddirstate(ui, repo, rev, **opts):
3094 """rebuild the dirstate as it would look like for the given revision
3096 """rebuild the dirstate as it would look like for the given revision
3095
3097
3096 If no revision is specified the first current parent will be used.
3098 If no revision is specified the first current parent will be used.
3097
3099
3098 The dirstate will be set to the files of the given revision.
3100 The dirstate will be set to the files of the given revision.
3099 The actual working directory content or existing dirstate
3101 The actual working directory content or existing dirstate
3100 information such as adds or removes is not considered.
3102 information such as adds or removes is not considered.
3101
3103
3102 ``minimal`` will only rebuild the dirstate status for files that claim to be
3104 ``minimal`` will only rebuild the dirstate status for files that claim to be
3103 tracked but are not in the parent manifest, or that exist in the parent
3105 tracked but are not in the parent manifest, or that exist in the parent
3104 manifest but are not in the dirstate. It will not change adds, removes, or
3106 manifest but are not in the dirstate. It will not change adds, removes, or
3105 modified files that are in the working copy parent.
3107 modified files that are in the working copy parent.
3106
3108
3107 One use of this command is to make the next :hg:`status` invocation
3109 One use of this command is to make the next :hg:`status` invocation
3108 check the actual file content.
3110 check the actual file content.
3109 """
3111 """
3110 ctx = scmutil.revsingle(repo, rev)
3112 ctx = scmutil.revsingle(repo, rev)
3111 with repo.wlock():
3113 with repo.wlock():
3112 dirstate = repo.dirstate
3114 dirstate = repo.dirstate
3113 changedfiles = None
3115 changedfiles = None
3114 # See command doc for what minimal does.
3116 # See command doc for what minimal does.
3115 if opts.get('minimal'):
3117 if opts.get('minimal'):
3116 manifestfiles = set(ctx.manifest().keys())
3118 manifestfiles = set(ctx.manifest().keys())
3117 dirstatefiles = set(dirstate)
3119 dirstatefiles = set(dirstate)
3118 manifestonly = manifestfiles - dirstatefiles
3120 manifestonly = manifestfiles - dirstatefiles
3119 dsonly = dirstatefiles - manifestfiles
3121 dsonly = dirstatefiles - manifestfiles
3120 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3122 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3121 changedfiles = manifestonly | dsnotadded
3123 changedfiles = manifestonly | dsnotadded
3122
3124
3123 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3125 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3124
3126
3125
3127
3126 @command(
3128 @command(
3127 b'debugrebuildfncache',
3129 b'debugrebuildfncache',
3128 [
3130 [
3129 (
3131 (
3130 b'',
3132 b'',
3131 b'only-data',
3133 b'only-data',
3132 False,
3134 False,
3133 _(b'only look for wrong .d files (much faster)'),
3135 _(b'only look for wrong .d files (much faster)'),
3134 )
3136 )
3135 ],
3137 ],
3136 b'',
3138 b'',
3137 )
3139 )
3138 def debugrebuildfncache(ui, repo, **opts):
3140 def debugrebuildfncache(ui, repo, **opts):
3139 """rebuild the fncache file"""
3141 """rebuild the fncache file"""
3140 opts = pycompat.byteskwargs(opts)
3142 opts = pycompat.byteskwargs(opts)
3141 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3143 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3142
3144
3143
3145
3144 @command(
3146 @command(
3145 b'debugrename',
3147 b'debugrename',
3146 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3148 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3147 _(b'[-r REV] [FILE]...'),
3149 _(b'[-r REV] [FILE]...'),
3148 )
3150 )
3149 def debugrename(ui, repo, *pats, **opts):
3151 def debugrename(ui, repo, *pats, **opts):
3150 """dump rename information"""
3152 """dump rename information"""
3151
3153
3152 opts = pycompat.byteskwargs(opts)
3154 opts = pycompat.byteskwargs(opts)
3153 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3155 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3154 m = scmutil.match(ctx, pats, opts)
3156 m = scmutil.match(ctx, pats, opts)
3155 for abs in ctx.walk(m):
3157 for abs in ctx.walk(m):
3156 fctx = ctx[abs]
3158 fctx = ctx[abs]
3157 o = fctx.filelog().renamed(fctx.filenode())
3159 o = fctx.filelog().renamed(fctx.filenode())
3158 rel = repo.pathto(abs)
3160 rel = repo.pathto(abs)
3159 if o:
3161 if o:
3160 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3162 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3161 else:
3163 else:
3162 ui.write(_(b"%s not renamed\n") % rel)
3164 ui.write(_(b"%s not renamed\n") % rel)
3163
3165
3164
3166
3165 @command(b'debugrequires|debugrequirements', [], b'')
3167 @command(b'debugrequires|debugrequirements', [], b'')
3166 def debugrequirements(ui, repo):
3168 def debugrequirements(ui, repo):
3167 """print the current repo requirements"""
3169 """print the current repo requirements"""
3168 for r in sorted(repo.requirements):
3170 for r in sorted(repo.requirements):
3169 ui.write(b"%s\n" % r)
3171 ui.write(b"%s\n" % r)
3170
3172
3171
3173
3172 @command(
3174 @command(
3173 b'debugrevlog',
3175 b'debugrevlog',
3174 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3176 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3175 _(b'-c|-m|FILE'),
3177 _(b'-c|-m|FILE'),
3176 optionalrepo=True,
3178 optionalrepo=True,
3177 )
3179 )
3178 def debugrevlog(ui, repo, file_=None, **opts):
3180 def debugrevlog(ui, repo, file_=None, **opts):
3179 """show data and statistics about a revlog"""
3181 """show data and statistics about a revlog"""
3180 opts = pycompat.byteskwargs(opts)
3182 opts = pycompat.byteskwargs(opts)
3181 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3183 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3182
3184
3183 if opts.get(b"dump"):
3185 if opts.get(b"dump"):
3184 numrevs = len(r)
3186 numrevs = len(r)
3185 ui.write(
3187 ui.write(
3186 (
3188 (
3187 b"# rev p1rev p2rev start end deltastart base p1 p2"
3189 b"# rev p1rev p2rev start end deltastart base p1 p2"
3188 b" rawsize totalsize compression heads chainlen\n"
3190 b" rawsize totalsize compression heads chainlen\n"
3189 )
3191 )
3190 )
3192 )
3191 ts = 0
3193 ts = 0
3192 heads = set()
3194 heads = set()
3193
3195
3194 for rev in pycompat.xrange(numrevs):
3196 for rev in pycompat.xrange(numrevs):
3195 dbase = r.deltaparent(rev)
3197 dbase = r.deltaparent(rev)
3196 if dbase == -1:
3198 if dbase == -1:
3197 dbase = rev
3199 dbase = rev
3198 cbase = r.chainbase(rev)
3200 cbase = r.chainbase(rev)
3199 clen = r.chainlen(rev)
3201 clen = r.chainlen(rev)
3200 p1, p2 = r.parentrevs(rev)
3202 p1, p2 = r.parentrevs(rev)
3201 rs = r.rawsize(rev)
3203 rs = r.rawsize(rev)
3202 ts = ts + rs
3204 ts = ts + rs
3203 heads -= set(r.parentrevs(rev))
3205 heads -= set(r.parentrevs(rev))
3204 heads.add(rev)
3206 heads.add(rev)
3205 try:
3207 try:
3206 compression = ts / r.end(rev)
3208 compression = ts / r.end(rev)
3207 except ZeroDivisionError:
3209 except ZeroDivisionError:
3208 compression = 0
3210 compression = 0
3209 ui.write(
3211 ui.write(
3210 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3212 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3211 b"%11d %5d %8d\n"
3213 b"%11d %5d %8d\n"
3212 % (
3214 % (
3213 rev,
3215 rev,
3214 p1,
3216 p1,
3215 p2,
3217 p2,
3216 r.start(rev),
3218 r.start(rev),
3217 r.end(rev),
3219 r.end(rev),
3218 r.start(dbase),
3220 r.start(dbase),
3219 r.start(cbase),
3221 r.start(cbase),
3220 r.start(p1),
3222 r.start(p1),
3221 r.start(p2),
3223 r.start(p2),
3222 rs,
3224 rs,
3223 ts,
3225 ts,
3224 compression,
3226 compression,
3225 len(heads),
3227 len(heads),
3226 clen,
3228 clen,
3227 )
3229 )
3228 )
3230 )
3229 return 0
3231 return 0
3230
3232
3231 format = r._format_version
3233 format = r._format_version
3232 v = r._format_flags
3234 v = r._format_flags
3233 flags = []
3235 flags = []
3234 gdelta = False
3236 gdelta = False
3235 if v & revlog.FLAG_INLINE_DATA:
3237 if v & revlog.FLAG_INLINE_DATA:
3236 flags.append(b'inline')
3238 flags.append(b'inline')
3237 if v & revlog.FLAG_GENERALDELTA:
3239 if v & revlog.FLAG_GENERALDELTA:
3238 gdelta = True
3240 gdelta = True
3239 flags.append(b'generaldelta')
3241 flags.append(b'generaldelta')
3240 if not flags:
3242 if not flags:
3241 flags = [b'(none)']
3243 flags = [b'(none)']
3242
3244
3243 ### tracks merge vs single parent
3245 ### tracks merge vs single parent
3244 nummerges = 0
3246 nummerges = 0
3245
3247
3246 ### tracks ways the "delta" are build
3248 ### tracks ways the "delta" are build
3247 # nodelta
3249 # nodelta
3248 numempty = 0
3250 numempty = 0
3249 numemptytext = 0
3251 numemptytext = 0
3250 numemptydelta = 0
3252 numemptydelta = 0
3251 # full file content
3253 # full file content
3252 numfull = 0
3254 numfull = 0
3253 # intermediate snapshot against a prior snapshot
3255 # intermediate snapshot against a prior snapshot
3254 numsemi = 0
3256 numsemi = 0
3255 # snapshot count per depth
3257 # snapshot count per depth
3256 numsnapdepth = collections.defaultdict(lambda: 0)
3258 numsnapdepth = collections.defaultdict(lambda: 0)
3257 # delta against previous revision
3259 # delta against previous revision
3258 numprev = 0
3260 numprev = 0
3259 # delta against first or second parent (not prev)
3261 # delta against first or second parent (not prev)
3260 nump1 = 0
3262 nump1 = 0
3261 nump2 = 0
3263 nump2 = 0
3262 # delta against neither prev nor parents
3264 # delta against neither prev nor parents
3263 numother = 0
3265 numother = 0
3264 # delta against prev that are also first or second parent
3266 # delta against prev that are also first or second parent
3265 # (details of `numprev`)
3267 # (details of `numprev`)
3266 nump1prev = 0
3268 nump1prev = 0
3267 nump2prev = 0
3269 nump2prev = 0
3268
3270
3269 # data about delta chain of each revs
3271 # data about delta chain of each revs
3270 chainlengths = []
3272 chainlengths = []
3271 chainbases = []
3273 chainbases = []
3272 chainspans = []
3274 chainspans = []
3273
3275
3274 # data about each revision
3276 # data about each revision
3275 datasize = [None, 0, 0]
3277 datasize = [None, 0, 0]
3276 fullsize = [None, 0, 0]
3278 fullsize = [None, 0, 0]
3277 semisize = [None, 0, 0]
3279 semisize = [None, 0, 0]
3278 # snapshot count per depth
3280 # snapshot count per depth
3279 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3281 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3280 deltasize = [None, 0, 0]
3282 deltasize = [None, 0, 0]
3281 chunktypecounts = {}
3283 chunktypecounts = {}
3282 chunktypesizes = {}
3284 chunktypesizes = {}
3283
3285
3284 def addsize(size, l):
3286 def addsize(size, l):
3285 if l[0] is None or size < l[0]:
3287 if l[0] is None or size < l[0]:
3286 l[0] = size
3288 l[0] = size
3287 if size > l[1]:
3289 if size > l[1]:
3288 l[1] = size
3290 l[1] = size
3289 l[2] += size
3291 l[2] += size
3290
3292
3291 numrevs = len(r)
3293 numrevs = len(r)
3292 for rev in pycompat.xrange(numrevs):
3294 for rev in pycompat.xrange(numrevs):
3293 p1, p2 = r.parentrevs(rev)
3295 p1, p2 = r.parentrevs(rev)
3294 delta = r.deltaparent(rev)
3296 delta = r.deltaparent(rev)
3295 if format > 0:
3297 if format > 0:
3296 addsize(r.rawsize(rev), datasize)
3298 addsize(r.rawsize(rev), datasize)
3297 if p2 != nullrev:
3299 if p2 != nullrev:
3298 nummerges += 1
3300 nummerges += 1
3299 size = r.length(rev)
3301 size = r.length(rev)
3300 if delta == nullrev:
3302 if delta == nullrev:
3301 chainlengths.append(0)
3303 chainlengths.append(0)
3302 chainbases.append(r.start(rev))
3304 chainbases.append(r.start(rev))
3303 chainspans.append(size)
3305 chainspans.append(size)
3304 if size == 0:
3306 if size == 0:
3305 numempty += 1
3307 numempty += 1
3306 numemptytext += 1
3308 numemptytext += 1
3307 else:
3309 else:
3308 numfull += 1
3310 numfull += 1
3309 numsnapdepth[0] += 1
3311 numsnapdepth[0] += 1
3310 addsize(size, fullsize)
3312 addsize(size, fullsize)
3311 addsize(size, snapsizedepth[0])
3313 addsize(size, snapsizedepth[0])
3312 else:
3314 else:
3313 chainlengths.append(chainlengths[delta] + 1)
3315 chainlengths.append(chainlengths[delta] + 1)
3314 baseaddr = chainbases[delta]
3316 baseaddr = chainbases[delta]
3315 revaddr = r.start(rev)
3317 revaddr = r.start(rev)
3316 chainbases.append(baseaddr)
3318 chainbases.append(baseaddr)
3317 chainspans.append((revaddr - baseaddr) + size)
3319 chainspans.append((revaddr - baseaddr) + size)
3318 if size == 0:
3320 if size == 0:
3319 numempty += 1
3321 numempty += 1
3320 numemptydelta += 1
3322 numemptydelta += 1
3321 elif r.issnapshot(rev):
3323 elif r.issnapshot(rev):
3322 addsize(size, semisize)
3324 addsize(size, semisize)
3323 numsemi += 1
3325 numsemi += 1
3324 depth = r.snapshotdepth(rev)
3326 depth = r.snapshotdepth(rev)
3325 numsnapdepth[depth] += 1
3327 numsnapdepth[depth] += 1
3326 addsize(size, snapsizedepth[depth])
3328 addsize(size, snapsizedepth[depth])
3327 else:
3329 else:
3328 addsize(size, deltasize)
3330 addsize(size, deltasize)
3329 if delta == rev - 1:
3331 if delta == rev - 1:
3330 numprev += 1
3332 numprev += 1
3331 if delta == p1:
3333 if delta == p1:
3332 nump1prev += 1
3334 nump1prev += 1
3333 elif delta == p2:
3335 elif delta == p2:
3334 nump2prev += 1
3336 nump2prev += 1
3335 elif delta == p1:
3337 elif delta == p1:
3336 nump1 += 1
3338 nump1 += 1
3337 elif delta == p2:
3339 elif delta == p2:
3338 nump2 += 1
3340 nump2 += 1
3339 elif delta != nullrev:
3341 elif delta != nullrev:
3340 numother += 1
3342 numother += 1
3341
3343
3342 # Obtain data on the raw chunks in the revlog.
3344 # Obtain data on the raw chunks in the revlog.
3343 if util.safehasattr(r, b'_getsegmentforrevs'):
3345 if util.safehasattr(r, b'_getsegmentforrevs'):
3344 segment = r._getsegmentforrevs(rev, rev)[1]
3346 segment = r._getsegmentforrevs(rev, rev)[1]
3345 else:
3347 else:
3346 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3348 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3347 if segment:
3349 if segment:
3348 chunktype = bytes(segment[0:1])
3350 chunktype = bytes(segment[0:1])
3349 else:
3351 else:
3350 chunktype = b'empty'
3352 chunktype = b'empty'
3351
3353
3352 if chunktype not in chunktypecounts:
3354 if chunktype not in chunktypecounts:
3353 chunktypecounts[chunktype] = 0
3355 chunktypecounts[chunktype] = 0
3354 chunktypesizes[chunktype] = 0
3356 chunktypesizes[chunktype] = 0
3355
3357
3356 chunktypecounts[chunktype] += 1
3358 chunktypecounts[chunktype] += 1
3357 chunktypesizes[chunktype] += size
3359 chunktypesizes[chunktype] += size
3358
3360
3359 # Adjust size min value for empty cases
3361 # Adjust size min value for empty cases
3360 for size in (datasize, fullsize, semisize, deltasize):
3362 for size in (datasize, fullsize, semisize, deltasize):
3361 if size[0] is None:
3363 if size[0] is None:
3362 size[0] = 0
3364 size[0] = 0
3363
3365
3364 numdeltas = numrevs - numfull - numempty - numsemi
3366 numdeltas = numrevs - numfull - numempty - numsemi
3365 numoprev = numprev - nump1prev - nump2prev
3367 numoprev = numprev - nump1prev - nump2prev
3366 totalrawsize = datasize[2]
3368 totalrawsize = datasize[2]
3367 datasize[2] /= numrevs
3369 datasize[2] /= numrevs
3368 fulltotal = fullsize[2]
3370 fulltotal = fullsize[2]
3369 if numfull == 0:
3371 if numfull == 0:
3370 fullsize[2] = 0
3372 fullsize[2] = 0
3371 else:
3373 else:
3372 fullsize[2] /= numfull
3374 fullsize[2] /= numfull
3373 semitotal = semisize[2]
3375 semitotal = semisize[2]
3374 snaptotal = {}
3376 snaptotal = {}
3375 if numsemi > 0:
3377 if numsemi > 0:
3376 semisize[2] /= numsemi
3378 semisize[2] /= numsemi
3377 for depth in snapsizedepth:
3379 for depth in snapsizedepth:
3378 snaptotal[depth] = snapsizedepth[depth][2]
3380 snaptotal[depth] = snapsizedepth[depth][2]
3379 snapsizedepth[depth][2] /= numsnapdepth[depth]
3381 snapsizedepth[depth][2] /= numsnapdepth[depth]
3380
3382
3381 deltatotal = deltasize[2]
3383 deltatotal = deltasize[2]
3382 if numdeltas > 0:
3384 if numdeltas > 0:
3383 deltasize[2] /= numdeltas
3385 deltasize[2] /= numdeltas
3384 totalsize = fulltotal + semitotal + deltatotal
3386 totalsize = fulltotal + semitotal + deltatotal
3385 avgchainlen = sum(chainlengths) / numrevs
3387 avgchainlen = sum(chainlengths) / numrevs
3386 maxchainlen = max(chainlengths)
3388 maxchainlen = max(chainlengths)
3387 maxchainspan = max(chainspans)
3389 maxchainspan = max(chainspans)
3388 compratio = 1
3390 compratio = 1
3389 if totalsize:
3391 if totalsize:
3390 compratio = totalrawsize / totalsize
3392 compratio = totalrawsize / totalsize
3391
3393
3392 basedfmtstr = b'%%%dd\n'
3394 basedfmtstr = b'%%%dd\n'
3393 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3395 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3394
3396
3395 def dfmtstr(max):
3397 def dfmtstr(max):
3396 return basedfmtstr % len(str(max))
3398 return basedfmtstr % len(str(max))
3397
3399
3398 def pcfmtstr(max, padding=0):
3400 def pcfmtstr(max, padding=0):
3399 return basepcfmtstr % (len(str(max)), b' ' * padding)
3401 return basepcfmtstr % (len(str(max)), b' ' * padding)
3400
3402
3401 def pcfmt(value, total):
3403 def pcfmt(value, total):
3402 if total:
3404 if total:
3403 return (value, 100 * float(value) / total)
3405 return (value, 100 * float(value) / total)
3404 else:
3406 else:
3405 return value, 100.0
3407 return value, 100.0
3406
3408
3407 ui.writenoi18n(b'format : %d\n' % format)
3409 ui.writenoi18n(b'format : %d\n' % format)
3408 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3410 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3409
3411
3410 ui.write(b'\n')
3412 ui.write(b'\n')
3411 fmt = pcfmtstr(totalsize)
3413 fmt = pcfmtstr(totalsize)
3412 fmt2 = dfmtstr(totalsize)
3414 fmt2 = dfmtstr(totalsize)
3413 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3415 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3414 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3416 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3415 ui.writenoi18n(
3417 ui.writenoi18n(
3416 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3418 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3417 )
3419 )
3418 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3420 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3419 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3421 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3420 ui.writenoi18n(
3422 ui.writenoi18n(
3421 b' text : '
3423 b' text : '
3422 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3424 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3423 )
3425 )
3424 ui.writenoi18n(
3426 ui.writenoi18n(
3425 b' delta : '
3427 b' delta : '
3426 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3428 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3427 )
3429 )
3428 ui.writenoi18n(
3430 ui.writenoi18n(
3429 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3431 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3430 )
3432 )
3431 for depth in sorted(numsnapdepth):
3433 for depth in sorted(numsnapdepth):
3432 ui.write(
3434 ui.write(
3433 (b' lvl-%-3d : ' % depth)
3435 (b' lvl-%-3d : ' % depth)
3434 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3436 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3435 )
3437 )
3436 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3438 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3437 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3439 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3438 ui.writenoi18n(
3440 ui.writenoi18n(
3439 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3441 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3440 )
3442 )
3441 for depth in sorted(numsnapdepth):
3443 for depth in sorted(numsnapdepth):
3442 ui.write(
3444 ui.write(
3443 (b' lvl-%-3d : ' % depth)
3445 (b' lvl-%-3d : ' % depth)
3444 + fmt % pcfmt(snaptotal[depth], totalsize)
3446 + fmt % pcfmt(snaptotal[depth], totalsize)
3445 )
3447 )
3446 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3448 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3447
3449
3448 def fmtchunktype(chunktype):
3450 def fmtchunktype(chunktype):
3449 if chunktype == b'empty':
3451 if chunktype == b'empty':
3450 return b' %s : ' % chunktype
3452 return b' %s : ' % chunktype
3451 elif chunktype in pycompat.bytestr(string.ascii_letters):
3453 elif chunktype in pycompat.bytestr(string.ascii_letters):
3452 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3454 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3453 else:
3455 else:
3454 return b' 0x%s : ' % hex(chunktype)
3456 return b' 0x%s : ' % hex(chunktype)
3455
3457
3456 ui.write(b'\n')
3458 ui.write(b'\n')
3457 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3459 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3458 for chunktype in sorted(chunktypecounts):
3460 for chunktype in sorted(chunktypecounts):
3459 ui.write(fmtchunktype(chunktype))
3461 ui.write(fmtchunktype(chunktype))
3460 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3462 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3461 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3463 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3462 for chunktype in sorted(chunktypecounts):
3464 for chunktype in sorted(chunktypecounts):
3463 ui.write(fmtchunktype(chunktype))
3465 ui.write(fmtchunktype(chunktype))
3464 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3466 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3465
3467
3466 ui.write(b'\n')
3468 ui.write(b'\n')
3467 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3469 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3468 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3470 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3469 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3471 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3470 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3472 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3471 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3473 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3472
3474
3473 if format > 0:
3475 if format > 0:
3474 ui.write(b'\n')
3476 ui.write(b'\n')
3475 ui.writenoi18n(
3477 ui.writenoi18n(
3476 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3478 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3477 % tuple(datasize)
3479 % tuple(datasize)
3478 )
3480 )
3479 ui.writenoi18n(
3481 ui.writenoi18n(
3480 b'full revision size (min/max/avg) : %d / %d / %d\n'
3482 b'full revision size (min/max/avg) : %d / %d / %d\n'
3481 % tuple(fullsize)
3483 % tuple(fullsize)
3482 )
3484 )
3483 ui.writenoi18n(
3485 ui.writenoi18n(
3484 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3486 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3485 % tuple(semisize)
3487 % tuple(semisize)
3486 )
3488 )
3487 for depth in sorted(snapsizedepth):
3489 for depth in sorted(snapsizedepth):
3488 if depth == 0:
3490 if depth == 0:
3489 continue
3491 continue
3490 ui.writenoi18n(
3492 ui.writenoi18n(
3491 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3493 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3492 % ((depth,) + tuple(snapsizedepth[depth]))
3494 % ((depth,) + tuple(snapsizedepth[depth]))
3493 )
3495 )
3494 ui.writenoi18n(
3496 ui.writenoi18n(
3495 b'delta size (min/max/avg) : %d / %d / %d\n'
3497 b'delta size (min/max/avg) : %d / %d / %d\n'
3496 % tuple(deltasize)
3498 % tuple(deltasize)
3497 )
3499 )
3498
3500
3499 if numdeltas > 0:
3501 if numdeltas > 0:
3500 ui.write(b'\n')
3502 ui.write(b'\n')
3501 fmt = pcfmtstr(numdeltas)
3503 fmt = pcfmtstr(numdeltas)
3502 fmt2 = pcfmtstr(numdeltas, 4)
3504 fmt2 = pcfmtstr(numdeltas, 4)
3503 ui.writenoi18n(
3505 ui.writenoi18n(
3504 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3506 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3505 )
3507 )
3506 if numprev > 0:
3508 if numprev > 0:
3507 ui.writenoi18n(
3509 ui.writenoi18n(
3508 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3510 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3509 )
3511 )
3510 ui.writenoi18n(
3512 ui.writenoi18n(
3511 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3513 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3512 )
3514 )
3513 ui.writenoi18n(
3515 ui.writenoi18n(
3514 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3516 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3515 )
3517 )
3516 if gdelta:
3518 if gdelta:
3517 ui.writenoi18n(
3519 ui.writenoi18n(
3518 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3520 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3519 )
3521 )
3520 ui.writenoi18n(
3522 ui.writenoi18n(
3521 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3523 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3522 )
3524 )
3523 ui.writenoi18n(
3525 ui.writenoi18n(
3524 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3526 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3525 )
3527 )
3526
3528
3527
3529
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported via --format:

    - format 0: per-revision offset/length/linkrev plus node ids of the
      revision and its parents (offset and length only with --verbose),
    - format 1: per-revision flags/offset/length/size/linkrev plus the
      parents as revision numbers (offset/length only with --verbose).

    With --debug, full 40-hex node ids are printed instead of short ones.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug switches from the abbreviated node form to the full hex form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Only the first node is needed to size the id columns; all ids
        # rendered by shortfn have the same width.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One line per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if parent lookup fails so the
                # dump keeps going on damaged revlogs.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3641
3643
3642
3644
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of (stage name, transform) pairs applied in order to the
    # parsed tree; each stage consumes the previous stage's output.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage when optimization is disabled.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed vs. printed only when it differs
    # from the previously printed tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each stage's tree for later comparison
    # and printing the requested ones.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render a unified-diff-style listing of the mismatching revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3774
3776
3775
3777
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are two ways to name the same log sink;
    # accepting both at once would be ambiguous.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the repository over stdin/stdout; blocks until the client
    # disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3824
3826
3825
3827
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly results in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of these people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision when omitted, leaving p2 unset.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Take the working-copy lock since this mutates the dirstate.
    with repo.wlock():
        repo.setparents(node1, node2)
3853
3855
3854
3856
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision, so
    # shift it out of ``file_``.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # was b'debugdata' (copy-paste from that command); report the
            # actual command name in usage errors
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through wrapper storage objects to the underlying revlog, which
    # is what exposes the sidedata API.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # sort by entry key for stable, readable output
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3881
3883
3882
3884
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # The module-level ssl.wrap_socket() helper was deprecated in Python 3.7
    # and removed in 3.12; build an equivalent (non-verifying) context
    # explicitly. Verification is intentionally disabled: we only want the
    # peer's raw certificate to inspect/repair the chain, not to validate it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3954
3956
3955
3957
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundle files, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the standard log option handling (limit, newest-first, ...) but
    # force bundle/force defaults expected by getremotechanges() below.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log ordering/filtering options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            # Open the bundle file as a peer repository.
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we no longer have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style chatter while computing the changesets
        # the bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: unbundle the first backup containing the
                # requested changeset, inside a single transaction.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # legacy bundle format
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print a timestamp header per bundle, then
                # either the path (--verbose) or a one-line-per-cset view.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always release resources acquired by getremotechanges().
            cleanupfn()
4096
4098
4097
4099
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) of the
    # requested changeset, sorted by subrepo path for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4109
4111
4110
4112
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the active ui and repo objects;
    # repo may be None since the command accepts running outside a repo.
    local_ns = dict(ui=ui, repo=repo)
    code.interact(local=local_ns)
4126
4128
4127
4129
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so repeated computation over the
    # obsolescence graph is avoided.
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        succ_sets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succ_sets:
            # One line per successors set: each node prefixed by a space.
            # An empty (pruned) set still produces a blank line.
            line = b''.join(b' ' + node2str(node) for node in succsset)
            ui.write(line + b'\n')
4182
4184
4183
4185
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: report the cache as-is, never fill it in.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            # no cache entry at all for this revision
            display = b'missing'
        elif not fnode:
            # entry exists but is falsy (e.g. empty) — unusable
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                # cached fnode does not exist in the .hgtags filelog
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4202
4204
4203
4205
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r only makes sense inside a repository (optionalrepo allows none).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE keyword definitions; an empty key or the reserved
    # name 'ui' is rejected.
    props = {}
    for definition in opts['define']:
        try:
            key, value = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Print the parsed tree, and the alias-expanded tree if it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def _show_symbols(template_obj):
        # With --verbose, list the keywords and functions the template uses.
        kwds, funcs = template_obj.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the user-supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            _show_symbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _show_symbols(displayer.t)
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
4267
4269
4268
4270
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # Echo whatever ui.getpass() yields; None (no response) is replaced by a
    # placeholder so the output line is always well-formed.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4283
4285
4284
4286
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the raw (non-password) prompt response for testing ui.prompt().
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4297
4299
4298
4300
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the working-copy lock and then the store lock before rebuilding
    # every cache the repository knows about.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4304
4306
4305
4307
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate repeated -o names before handing off to the upgrade module,
    # which implements all of the actual logic.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4355
4357
4356
4358
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize OS path separators to '/' for display.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest repo-absolute and relative
    # paths. ``fname`` avoids shadowing the ``abs`` builtin, and generator
    # expressions avoid building throwaway lists. ``items`` is non-empty
    # here, so max() is safe.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            f(repo.pathto(fname)),
            m.exact(fname) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4383
4385
4384
4386
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)", joined
            # with single spaces and followed by a trailing separator.
            rendered = [
                b'%s (%s)' % (d.hex(), d.phasestr()) for d in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4402
4404
4403
4405
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command-specific
        # arguments (and only those with truthy values) are forwarded.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in opts.items() if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4434
4436
4435
4437
4436 def _parsewirelangblocks(fh):
4438 def _parsewirelangblocks(fh):
4437 activeaction = None
4439 activeaction = None
4438 blocklines = []
4440 blocklines = []
4439 lastindent = 0
4441 lastindent = 0
4440
4442
4441 for line in fh:
4443 for line in fh:
4442 line = line.rstrip()
4444 line = line.rstrip()
4443 if not line:
4445 if not line:
4444 continue
4446 continue
4445
4447
4446 if line.startswith(b'#'):
4448 if line.startswith(b'#'):
4447 continue
4449 continue
4448
4450
4449 if not line.startswith(b' '):
4451 if not line.startswith(b' '):
4450 # New block. Flush previous one.
4452 # New block. Flush previous one.
4451 if activeaction:
4453 if activeaction:
4452 yield activeaction, blocklines
4454 yield activeaction, blocklines
4453
4455
4454 activeaction = line
4456 activeaction = line
4455 blocklines = []
4457 blocklines = []
4456 lastindent = 0
4458 lastindent = 0
4457 continue
4459 continue
4458
4460
4459 # Else we start with an indent.
4461 # Else we start with an indent.
4460
4462
4461 if not activeaction:
4463 if not activeaction:
4462 raise error.Abort(_(b'indented line outside of block'))
4464 raise error.Abort(_(b'indented line outside of block'))
4463
4465
4464 indent = len(line) - len(line.lstrip())
4466 indent = len(line) - len(line.lstrip())
4465
4467
4466 # If this line is indented more than the last line, concatenate it.
4468 # If this line is indented more than the last line, concatenate it.
4467 if indent > lastindent and blocklines:
4469 if indent > lastindent and blocklines:
4468 blocklines[-1] += line.lstrip()
4470 blocklines[-1] += line.lstrip()
4469 else:
4471 else:
4470 blocklines.append(line)
4472 blocklines.append(line)
4471 lastindent = indent
4473 lastindent = indent
4472
4474
4473 # Flush last block.
4475 # Flush last block.
4474 if activeaction:
4476 if activeaction:
4475 yield activeaction, blocklines
4477 yield activeaction, blocklines
4476
4478
4477
4479
4478 @command(
4480 @command(
4479 b'debugwireproto',
4481 b'debugwireproto',
4480 [
4482 [
4481 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4483 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4482 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4484 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4483 (
4485 (
4484 b'',
4486 b'',
4485 b'noreadstderr',
4487 b'noreadstderr',
4486 False,
4488 False,
4487 _(b'do not read from stderr of the remote'),
4489 _(b'do not read from stderr of the remote'),
4488 ),
4490 ),
4489 (
4491 (
4490 b'',
4492 b'',
4491 b'nologhandshake',
4493 b'nologhandshake',
4492 False,
4494 False,
4493 _(b'do not log I/O related to the peer handshake'),
4495 _(b'do not log I/O related to the peer handshake'),
4494 ),
4496 ),
4495 ]
4497 ]
4496 + cmdutil.remoteopts,
4498 + cmdutil.remoteopts,
4497 _(b'[PATH]'),
4499 _(b'[PATH]'),
4498 optionalrepo=True,
4500 optionalrepo=True,
4499 )
4501 )
4500 def debugwireproto(ui, repo, path=None, **opts):
4502 def debugwireproto(ui, repo, path=None, **opts):
4501 """send wire protocol commands to a server
4503 """send wire protocol commands to a server
4502
4504
4503 This command can be used to issue wire protocol commands to remote
4505 This command can be used to issue wire protocol commands to remote
4504 peers and to debug the raw data being exchanged.
4506 peers and to debug the raw data being exchanged.
4505
4507
4506 ``--localssh`` will start an SSH server against the current repository
4508 ``--localssh`` will start an SSH server against the current repository
4507 and connect to that. By default, the connection will perform a handshake
4509 and connect to that. By default, the connection will perform a handshake
4508 and establish an appropriate peer instance.
4510 and establish an appropriate peer instance.
4509
4511
4510 ``--peer`` can be used to bypass the handshake protocol and construct a
4512 ``--peer`` can be used to bypass the handshake protocol and construct a
4511 peer instance using the specified class type. Valid values are ``raw``,
4513 peer instance using the specified class type. Valid values are ``raw``,
4512 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4514 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4513 don't support higher-level command actions.
4515 don't support higher-level command actions.
4514
4516
4515 ``--noreadstderr`` can be used to disable automatic reading from stderr
4517 ``--noreadstderr`` can be used to disable automatic reading from stderr
4516 of the peer (for SSH connections only). Disabling automatic reading of
4518 of the peer (for SSH connections only). Disabling automatic reading of
4517 stderr is useful for making output more deterministic.
4519 stderr is useful for making output more deterministic.
4518
4520
4519 Commands are issued via a mini language which is specified via stdin.
4521 Commands are issued via a mini language which is specified via stdin.
4520 The language consists of individual actions to perform. An action is
4522 The language consists of individual actions to perform. An action is
4521 defined by a block. A block is defined as a line with no leading
4523 defined by a block. A block is defined as a line with no leading
4522 space followed by 0 or more lines with leading space. Blocks are
4524 space followed by 0 or more lines with leading space. Blocks are
4523 effectively a high-level command with additional metadata.
4525 effectively a high-level command with additional metadata.
4524
4526
4525 Lines beginning with ``#`` are ignored.
4527 Lines beginning with ``#`` are ignored.
4526
4528
4527 The following sections denote available actions.
4529 The following sections denote available actions.
4528
4530
4529 raw
4531 raw
4530 ---
4532 ---
4531
4533
4532 Send raw data to the server.
4534 Send raw data to the server.
4533
4535
4534 The block payload contains the raw data to send as one atomic send
4536 The block payload contains the raw data to send as one atomic send
4535 operation. The data may not actually be delivered in a single system
4537 operation. The data may not actually be delivered in a single system
4536 call: it depends on the abilities of the transport being used.
4538 call: it depends on the abilities of the transport being used.
4537
4539
4538 Each line in the block is de-indented and concatenated. Then, that
4540 Each line in the block is de-indented and concatenated. Then, that
4539 value is evaluated as a Python b'' literal. This allows the use of
4541 value is evaluated as a Python b'' literal. This allows the use of
4540 backslash escaping, etc.
4542 backslash escaping, etc.
4541
4543
4542 raw+
4544 raw+
4543 ----
4545 ----
4544
4546
4545 Behaves like ``raw`` except flushes output afterwards.
4547 Behaves like ``raw`` except flushes output afterwards.
4546
4548
4547 command <X>
4549 command <X>
4548 -----------
4550 -----------
4549
4551
4550 Send a request to run a named command, whose name follows the ``command``
4552 Send a request to run a named command, whose name follows the ``command``
4551 string.
4553 string.
4552
4554
4553 Arguments to the command are defined as lines in this block. The format of
4555 Arguments to the command are defined as lines in this block. The format of
4554 each line is ``<key> <value>``. e.g.::
4556 each line is ``<key> <value>``. e.g.::
4555
4557
4556 command listkeys
4558 command listkeys
4557 namespace bookmarks
4559 namespace bookmarks
4558
4560
4559 If the value begins with ``eval:``, it will be interpreted as a Python
4561 If the value begins with ``eval:``, it will be interpreted as a Python
4560 literal expression. Otherwise values are interpreted as Python b'' literals.
4562 literal expression. Otherwise values are interpreted as Python b'' literals.
4561 This allows sending complex types and encoding special byte sequences via
4563 This allows sending complex types and encoding special byte sequences via
4562 backslash escaping.
4564 backslash escaping.
4563
4565
4564 The following arguments have special meaning:
4566 The following arguments have special meaning:
4565
4567
4566 ``PUSHFILE``
4568 ``PUSHFILE``
4567 When defined, the *push* mechanism of the peer will be used instead
4569 When defined, the *push* mechanism of the peer will be used instead
4568 of the static request-response mechanism and the content of the
4570 of the static request-response mechanism and the content of the
4569 file specified in the value of this argument will be sent as the
4571 file specified in the value of this argument will be sent as the
4570 command payload.
4572 command payload.
4571
4573
4572 This can be used to submit a local bundle file to the remote.
4574 This can be used to submit a local bundle file to the remote.
4573
4575
4574 batchbegin
4576 batchbegin
4575 ----------
4577 ----------
4576
4578
4577 Instruct the peer to begin a batched send.
4579 Instruct the peer to begin a batched send.
4578
4580
4579 All ``command`` blocks are queued for execution until the next
4581 All ``command`` blocks are queued for execution until the next
4580 ``batchsubmit`` block.
4582 ``batchsubmit`` block.
4581
4583
4582 batchsubmit
4584 batchsubmit
4583 -----------
4585 -----------
4584
4586
4585 Submit previously queued ``command`` blocks as a batch request.
4587 Submit previously queued ``command`` blocks as a batch request.
4586
4588
4587 This action MUST be paired with a ``batchbegin`` action.
4589 This action MUST be paired with a ``batchbegin`` action.
4588
4590
4589 httprequest <method> <path>
4591 httprequest <method> <path>
4590 ---------------------------
4592 ---------------------------
4591
4593
4592 (HTTP peer only)
4594 (HTTP peer only)
4593
4595
4594 Send an HTTP request to the peer.
4596 Send an HTTP request to the peer.
4595
4597
4596 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4598 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4597
4599
4598 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4600 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4599 headers to add to the request. e.g. ``Accept: foo``.
4601 headers to add to the request. e.g. ``Accept: foo``.
4600
4602
4601 The following arguments are special:
4603 The following arguments are special:
4602
4604
4603 ``BODYFILE``
4605 ``BODYFILE``
4604 The content of the file defined as the value to this argument will be
4606 The content of the file defined as the value to this argument will be
4605 transferred verbatim as the HTTP request body.
4607 transferred verbatim as the HTTP request body.
4606
4608
4607 ``frame <type> <flags> <payload>``
4609 ``frame <type> <flags> <payload>``
4608 Send a unified protocol frame as part of the request body.
4610 Send a unified protocol frame as part of the request body.
4609
4611
4610 All frames will be collected and sent as the body to the HTTP
4612 All frames will be collected and sent as the body to the HTTP
4611 request.
4613 request.
4612
4614
4613 close
4615 close
4614 -----
4616 -----
4615
4617
4616 Close the connection to the server.
4618 Close the connection to the server.
4617
4619
4618 flush
4620 flush
4619 -----
4621 -----
4620
4622
4621 Flush data written to the server.
4623 Flush data written to the server.
4622
4624
4623 readavailable
4625 readavailable
4624 -------------
4626 -------------
4625
4627
4626 Close the write end of the connection and read all available data from
4628 Close the write end of the connection and read all available data from
4627 the server.
4629 the server.
4628
4630
4629 If the connection to the server encompasses multiple pipes, we poll both
4631 If the connection to the server encompasses multiple pipes, we poll both
4630 pipes and read available data.
4632 pipes and read available data.
4631
4633
4632 readline
4634 readline
4633 --------
4635 --------
4634
4636
4635 Read a line of output from the server. If there are multiple output
4637 Read a line of output from the server. If there are multiple output
4636 pipes, reads only the main pipe.
4638 pipes, reads only the main pipe.
4637
4639
4638 ereadline
4640 ereadline
4639 ---------
4641 ---------
4640
4642
4641 Like ``readline``, but read from the stderr pipe, if available.
4643 Like ``readline``, but read from the stderr pipe, if available.
4642
4644
4643 read <X>
4645 read <X>
4644 --------
4646 --------
4645
4647
4646 ``read()`` N bytes from the server's main output pipe.
4648 ``read()`` N bytes from the server's main output pipe.
4647
4649
4648 eread <X>
4650 eread <X>
4649 ---------
4651 ---------
4650
4652
4651 ``read()`` N bytes from the server's stderr pipe, if available.
4653 ``read()`` N bytes from the server's stderr pipe, if available.
4652
4654
4653 Specifying Unified Frame-Based Protocol Frames
4655 Specifying Unified Frame-Based Protocol Frames
4654 ----------------------------------------------
4656 ----------------------------------------------
4655
4657
4656 It is possible to emit a *Unified Frame-Based Protocol* by using special
4658 It is possible to emit a *Unified Frame-Based Protocol* by using special
4657 syntax.
4659 syntax.
4658
4660
4659 A frame is composed as a type, flags, and payload. These can be parsed
4661 A frame is composed as a type, flags, and payload. These can be parsed
4660 from a string of the form:
4662 from a string of the form:
4661
4663
4662 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4664 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4663
4665
4664 ``request-id`` and ``stream-id`` are integers defining the request and
4666 ``request-id`` and ``stream-id`` are integers defining the request and
4665 stream identifiers.
4667 stream identifiers.
4666
4668
4667 ``type`` can be an integer value for the frame type or the string name
4669 ``type`` can be an integer value for the frame type or the string name
4668 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4670 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4669 ``command-name``.
4671 ``command-name``.
4670
4672
4671 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4673 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4672 components. Each component (and there can be just one) can be an integer
4674 components. Each component (and there can be just one) can be an integer
4673 or a flag name for stream flags or frame flags, respectively. Values are
4675 or a flag name for stream flags or frame flags, respectively. Values are
4674 resolved to integers and then bitwise OR'd together.
4676 resolved to integers and then bitwise OR'd together.
4675
4677
4676 ``payload`` represents the raw frame payload. If it begins with
4678 ``payload`` represents the raw frame payload. If it begins with
4677 ``cbor:``, the following string is evaluated as Python code and the
4679 ``cbor:``, the following string is evaluated as Python code and the
4678 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4680 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4679 as a Python byte string literal.
4681 as a Python byte string literal.
4680 """
4682 """
4681 opts = pycompat.byteskwargs(opts)
4683 opts = pycompat.byteskwargs(opts)
4682
4684
4683 if opts[b'localssh'] and not repo:
4685 if opts[b'localssh'] and not repo:
4684 raise error.Abort(_(b'--localssh requires a repository'))
4686 raise error.Abort(_(b'--localssh requires a repository'))
4685
4687
4686 if opts[b'peer'] and opts[b'peer'] not in (
4688 if opts[b'peer'] and opts[b'peer'] not in (
4687 b'raw',
4689 b'raw',
4688 b'ssh1',
4690 b'ssh1',
4689 ):
4691 ):
4690 raise error.Abort(
4692 raise error.Abort(
4691 _(b'invalid value for --peer'),
4693 _(b'invalid value for --peer'),
4692 hint=_(b'valid values are "raw" and "ssh1"'),
4694 hint=_(b'valid values are "raw" and "ssh1"'),
4693 )
4695 )
4694
4696
4695 if path and opts[b'localssh']:
4697 if path and opts[b'localssh']:
4696 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4698 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4697
4699
4698 if ui.interactive():
4700 if ui.interactive():
4699 ui.write(_(b'(waiting for commands on stdin)\n'))
4701 ui.write(_(b'(waiting for commands on stdin)\n'))
4700
4702
4701 blocks = list(_parsewirelangblocks(ui.fin))
4703 blocks = list(_parsewirelangblocks(ui.fin))
4702
4704
4703 proc = None
4705 proc = None
4704 stdin = None
4706 stdin = None
4705 stdout = None
4707 stdout = None
4706 stderr = None
4708 stderr = None
4707 opener = None
4709 opener = None
4708
4710
4709 if opts[b'localssh']:
4711 if opts[b'localssh']:
4710 # We start the SSH server in its own process so there is process
4712 # We start the SSH server in its own process so there is process
4711 # separation. This prevents a whole class of potential bugs around
4713 # separation. This prevents a whole class of potential bugs around
4712 # shared state from interfering with server operation.
4714 # shared state from interfering with server operation.
4713 args = procutil.hgcmd() + [
4715 args = procutil.hgcmd() + [
4714 b'-R',
4716 b'-R',
4715 repo.root,
4717 repo.root,
4716 b'debugserve',
4718 b'debugserve',
4717 b'--sshstdio',
4719 b'--sshstdio',
4718 ]
4720 ]
4719 proc = subprocess.Popen(
4721 proc = subprocess.Popen(
4720 pycompat.rapply(procutil.tonativestr, args),
4722 pycompat.rapply(procutil.tonativestr, args),
4721 stdin=subprocess.PIPE,
4723 stdin=subprocess.PIPE,
4722 stdout=subprocess.PIPE,
4724 stdout=subprocess.PIPE,
4723 stderr=subprocess.PIPE,
4725 stderr=subprocess.PIPE,
4724 bufsize=0,
4726 bufsize=0,
4725 )
4727 )
4726
4728
4727 stdin = proc.stdin
4729 stdin = proc.stdin
4728 stdout = proc.stdout
4730 stdout = proc.stdout
4729 stderr = proc.stderr
4731 stderr = proc.stderr
4730
4732
4731 # We turn the pipes into observers so we can log I/O.
4733 # We turn the pipes into observers so we can log I/O.
4732 if ui.verbose or opts[b'peer'] == b'raw':
4734 if ui.verbose or opts[b'peer'] == b'raw':
4733 stdin = util.makeloggingfileobject(
4735 stdin = util.makeloggingfileobject(
4734 ui, proc.stdin, b'i', logdata=True
4736 ui, proc.stdin, b'i', logdata=True
4735 )
4737 )
4736 stdout = util.makeloggingfileobject(
4738 stdout = util.makeloggingfileobject(
4737 ui, proc.stdout, b'o', logdata=True
4739 ui, proc.stdout, b'o', logdata=True
4738 )
4740 )
4739 stderr = util.makeloggingfileobject(
4741 stderr = util.makeloggingfileobject(
4740 ui, proc.stderr, b'e', logdata=True
4742 ui, proc.stderr, b'e', logdata=True
4741 )
4743 )
4742
4744
4743 # --localssh also implies the peer connection settings.
4745 # --localssh also implies the peer connection settings.
4744
4746
4745 url = b'ssh://localserver'
4747 url = b'ssh://localserver'
4746 autoreadstderr = not opts[b'noreadstderr']
4748 autoreadstderr = not opts[b'noreadstderr']
4747
4749
4748 if opts[b'peer'] == b'ssh1':
4750 if opts[b'peer'] == b'ssh1':
4749 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4751 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4750 peer = sshpeer.sshv1peer(
4752 peer = sshpeer.sshv1peer(
4751 ui,
4753 ui,
4752 url,
4754 url,
4753 proc,
4755 proc,
4754 stdin,
4756 stdin,
4755 stdout,
4757 stdout,
4756 stderr,
4758 stderr,
4757 None,
4759 None,
4758 autoreadstderr=autoreadstderr,
4760 autoreadstderr=autoreadstderr,
4759 )
4761 )
4760 elif opts[b'peer'] == b'raw':
4762 elif opts[b'peer'] == b'raw':
4761 ui.write(_(b'using raw connection to peer\n'))
4763 ui.write(_(b'using raw connection to peer\n'))
4762 peer = None
4764 peer = None
4763 else:
4765 else:
4764 ui.write(_(b'creating ssh peer from handshake results\n'))
4766 ui.write(_(b'creating ssh peer from handshake results\n'))
4765 peer = sshpeer.makepeer(
4767 peer = sshpeer.makepeer(
4766 ui,
4768 ui,
4767 url,
4769 url,
4768 proc,
4770 proc,
4769 stdin,
4771 stdin,
4770 stdout,
4772 stdout,
4771 stderr,
4773 stderr,
4772 autoreadstderr=autoreadstderr,
4774 autoreadstderr=autoreadstderr,
4773 )
4775 )
4774
4776
4775 elif path:
4777 elif path:
4776 # We bypass hg.peer() so we can proxy the sockets.
4778 # We bypass hg.peer() so we can proxy the sockets.
4777 # TODO consider not doing this because we skip
4779 # TODO consider not doing this because we skip
4778 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4780 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4779 u = urlutil.url(path)
4781 u = urlutil.url(path)
4780 if u.scheme != b'http':
4782 if u.scheme != b'http':
4781 raise error.Abort(_(b'only http:// paths are currently supported'))
4783 raise error.Abort(_(b'only http:// paths are currently supported'))
4782
4784
4783 url, authinfo = u.authinfo()
4785 url, authinfo = u.authinfo()
4784 openerargs = {
4786 openerargs = {
4785 'useragent': b'Mercurial debugwireproto',
4787 'useragent': b'Mercurial debugwireproto',
4786 }
4788 }
4787
4789
4788 # Turn pipes/sockets into observers so we can log I/O.
4790 # Turn pipes/sockets into observers so we can log I/O.
4789 if ui.verbose:
4791 if ui.verbose:
4790 openerargs.update(
4792 openerargs.update(
4791 {
4793 {
4792 'loggingfh': ui,
4794 'loggingfh': ui,
4793 'loggingname': b's',
4795 'loggingname': b's',
4794 'loggingopts': {
4796 'loggingopts': {
4795 'logdata': True,
4797 'logdata': True,
4796 'logdataapis': False,
4798 'logdataapis': False,
4797 },
4799 },
4798 }
4800 }
4799 )
4801 )
4800
4802
4801 if ui.debugflag:
4803 if ui.debugflag:
4802 openerargs['loggingopts']['logdataapis'] = True
4804 openerargs['loggingopts']['logdataapis'] = True
4803
4805
4804 # Don't send default headers when in raw mode. This allows us to
4806 # Don't send default headers when in raw mode. This allows us to
4805 # bypass most of the behavior of our URL handling code so we can
4807 # bypass most of the behavior of our URL handling code so we can
4806 # have near complete control over what's sent on the wire.
4808 # have near complete control over what's sent on the wire.
4807 if opts[b'peer'] == b'raw':
4809 if opts[b'peer'] == b'raw':
4808 openerargs['sendaccept'] = False
4810 openerargs['sendaccept'] = False
4809
4811
4810 opener = urlmod.opener(ui, authinfo, **openerargs)
4812 opener = urlmod.opener(ui, authinfo, **openerargs)
4811
4813
4812 if opts[b'peer'] == b'raw':
4814 if opts[b'peer'] == b'raw':
4813 ui.write(_(b'using raw connection to peer\n'))
4815 ui.write(_(b'using raw connection to peer\n'))
4814 peer = None
4816 peer = None
4815 elif opts[b'peer']:
4817 elif opts[b'peer']:
4816 raise error.Abort(
4818 raise error.Abort(
4817 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4819 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4818 )
4820 )
4819 else:
4821 else:
4820 peer = httppeer.makepeer(ui, path, opener=opener)
4822 peer = httppeer.makepeer(ui, path, opener=opener)
4821
4823
4822 # We /could/ populate stdin/stdout with sock.makefile()...
4824 # We /could/ populate stdin/stdout with sock.makefile()...
4823 else:
4825 else:
4824 raise error.Abort(_(b'unsupported connection configuration'))
4826 raise error.Abort(_(b'unsupported connection configuration'))
4825
4827
4826 batchedcommands = None
4828 batchedcommands = None
4827
4829
4828 # Now perform actions based on the parsed wire language instructions.
4830 # Now perform actions based on the parsed wire language instructions.
4829 for action, lines in blocks:
4831 for action, lines in blocks:
4830 if action in (b'raw', b'raw+'):
4832 if action in (b'raw', b'raw+'):
4831 if not stdin:
4833 if not stdin:
4832 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4834 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4833
4835
4834 # Concatenate the data together.
4836 # Concatenate the data together.
4835 data = b''.join(l.lstrip() for l in lines)
4837 data = b''.join(l.lstrip() for l in lines)
4836 data = stringutil.unescapestr(data)
4838 data = stringutil.unescapestr(data)
4837 stdin.write(data)
4839 stdin.write(data)
4838
4840
4839 if action == b'raw+':
4841 if action == b'raw+':
4840 stdin.flush()
4842 stdin.flush()
4841 elif action == b'flush':
4843 elif action == b'flush':
4842 if not stdin:
4844 if not stdin:
4843 raise error.Abort(_(b'cannot call flush on this peer'))
4845 raise error.Abort(_(b'cannot call flush on this peer'))
4844 stdin.flush()
4846 stdin.flush()
4845 elif action.startswith(b'command'):
4847 elif action.startswith(b'command'):
4846 if not peer:
4848 if not peer:
4847 raise error.Abort(
4849 raise error.Abort(
4848 _(
4850 _(
4849 b'cannot send commands unless peer instance '
4851 b'cannot send commands unless peer instance '
4850 b'is available'
4852 b'is available'
4851 )
4853 )
4852 )
4854 )
4853
4855
4854 command = action.split(b' ', 1)[1]
4856 command = action.split(b' ', 1)[1]
4855
4857
4856 args = {}
4858 args = {}
4857 for line in lines:
4859 for line in lines:
4858 # We need to allow empty values.
4860 # We need to allow empty values.
4859 fields = line.lstrip().split(b' ', 1)
4861 fields = line.lstrip().split(b' ', 1)
4860 if len(fields) == 1:
4862 if len(fields) == 1:
4861 key = fields[0]
4863 key = fields[0]
4862 value = b''
4864 value = b''
4863 else:
4865 else:
4864 key, value = fields
4866 key, value = fields
4865
4867
4866 if value.startswith(b'eval:'):
4868 if value.startswith(b'eval:'):
4867 value = stringutil.evalpythonliteral(value[5:])
4869 value = stringutil.evalpythonliteral(value[5:])
4868 else:
4870 else:
4869 value = stringutil.unescapestr(value)
4871 value = stringutil.unescapestr(value)
4870
4872
4871 args[key] = value
4873 args[key] = value
4872
4874
4873 if batchedcommands is not None:
4875 if batchedcommands is not None:
4874 batchedcommands.append((command, args))
4876 batchedcommands.append((command, args))
4875 continue
4877 continue
4876
4878
4877 ui.status(_(b'sending %s command\n') % command)
4879 ui.status(_(b'sending %s command\n') % command)
4878
4880
4879 if b'PUSHFILE' in args:
4881 if b'PUSHFILE' in args:
4880 with open(args[b'PUSHFILE'], 'rb') as fh:
4882 with open(args[b'PUSHFILE'], 'rb') as fh:
4881 del args[b'PUSHFILE']
4883 del args[b'PUSHFILE']
4882 res, output = peer._callpush(
4884 res, output = peer._callpush(
4883 command, fh, **pycompat.strkwargs(args)
4885 command, fh, **pycompat.strkwargs(args)
4884 )
4886 )
4885 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4887 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4886 ui.status(
4888 ui.status(
4887 _(b'remote output: %s\n') % stringutil.escapestr(output)
4889 _(b'remote output: %s\n') % stringutil.escapestr(output)
4888 )
4890 )
4889 else:
4891 else:
4890 with peer.commandexecutor() as e:
4892 with peer.commandexecutor() as e:
4891 res = e.callcommand(command, args).result()
4893 res = e.callcommand(command, args).result()
4892
4894
4893 ui.status(
4895 ui.status(
4894 _(b'response: %s\n')
4896 _(b'response: %s\n')
4895 % stringutil.pprint(res, bprefix=True, indent=2)
4897 % stringutil.pprint(res, bprefix=True, indent=2)
4896 )
4898 )
4897
4899
4898 elif action == b'batchbegin':
4900 elif action == b'batchbegin':
4899 if batchedcommands is not None:
4901 if batchedcommands is not None:
4900 raise error.Abort(_(b'nested batchbegin not allowed'))
4902 raise error.Abort(_(b'nested batchbegin not allowed'))
4901
4903
4902 batchedcommands = []
4904 batchedcommands = []
4903 elif action == b'batchsubmit':
4905 elif action == b'batchsubmit':
4904 # There is a batching API we could go through. But it would be
4906 # There is a batching API we could go through. But it would be
4905 # difficult to normalize requests into function calls. It is easier
4907 # difficult to normalize requests into function calls. It is easier
4906 # to bypass this layer and normalize to commands + args.
4908 # to bypass this layer and normalize to commands + args.
4907 ui.status(
4909 ui.status(
4908 _(b'sending batch with %d sub-commands\n')
4910 _(b'sending batch with %d sub-commands\n')
4909 % len(batchedcommands)
4911 % len(batchedcommands)
4910 )
4912 )
4911 assert peer is not None
4913 assert peer is not None
4912 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4914 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4913 ui.status(
4915 ui.status(
4914 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4916 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4915 )
4917 )
4916
4918
4917 batchedcommands = None
4919 batchedcommands = None
4918
4920
4919 elif action.startswith(b'httprequest '):
4921 elif action.startswith(b'httprequest '):
4920 if not opener:
4922 if not opener:
4921 raise error.Abort(
4923 raise error.Abort(
4922 _(b'cannot use httprequest without an HTTP peer')
4924 _(b'cannot use httprequest without an HTTP peer')
4923 )
4925 )
4924
4926
4925 request = action.split(b' ', 2)
4927 request = action.split(b' ', 2)
4926 if len(request) != 3:
4928 if len(request) != 3:
4927 raise error.Abort(
4929 raise error.Abort(
4928 _(
4930 _(
4929 b'invalid httprequest: expected format is '
4931 b'invalid httprequest: expected format is '
4930 b'"httprequest <method> <path>'
4932 b'"httprequest <method> <path>'
4931 )
4933 )
4932 )
4934 )
4933
4935
4934 method, httppath = request[1:]
4936 method, httppath = request[1:]
4935 headers = {}
4937 headers = {}
4936 body = None
4938 body = None
4937 frames = []
4939 frames = []
4938 for line in lines:
4940 for line in lines:
4939 line = line.lstrip()
4941 line = line.lstrip()
4940 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4942 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4941 if m:
4943 if m:
4942 # Headers need to use native strings.
4944 # Headers need to use native strings.
4943 key = pycompat.strurl(m.group(1))
4945 key = pycompat.strurl(m.group(1))
4944 value = pycompat.strurl(m.group(2))
4946 value = pycompat.strurl(m.group(2))
4945 headers[key] = value
4947 headers[key] = value
4946 continue
4948 continue
4947
4949
4948 if line.startswith(b'BODYFILE '):
4950 if line.startswith(b'BODYFILE '):
4949 with open(line.split(b' ', 1), b'rb') as fh:
4951 with open(line.split(b' ', 1), b'rb') as fh:
4950 body = fh.read()
4952 body = fh.read()
4951 elif line.startswith(b'frame '):
4953 elif line.startswith(b'frame '):
4952 frame = wireprotoframing.makeframefromhumanstring(
4954 frame = wireprotoframing.makeframefromhumanstring(
4953 line[len(b'frame ') :]
4955 line[len(b'frame ') :]
4954 )
4956 )
4955
4957
4956 frames.append(frame)
4958 frames.append(frame)
4957 else:
4959 else:
4958 raise error.Abort(
4960 raise error.Abort(
4959 _(b'unknown argument to httprequest: %s') % line
4961 _(b'unknown argument to httprequest: %s') % line
4960 )
4962 )
4961
4963
4962 url = path + httppath
4964 url = path + httppath
4963
4965
4964 if frames:
4966 if frames:
4965 body = b''.join(bytes(f) for f in frames)
4967 body = b''.join(bytes(f) for f in frames)
4966
4968
4967 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4969 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4968
4970
4969 # urllib.Request insists on using has_data() as a proxy for
4971 # urllib.Request insists on using has_data() as a proxy for
4970 # determining the request method. Override that to use our
4972 # determining the request method. Override that to use our
4971 # explicitly requested method.
4973 # explicitly requested method.
4972 req.get_method = lambda: pycompat.sysstr(method)
4974 req.get_method = lambda: pycompat.sysstr(method)
4973
4975
4974 try:
4976 try:
4975 res = opener.open(req)
4977 res = opener.open(req)
4976 body = res.read()
4978 body = res.read()
4977 except util.urlerr.urlerror as e:
4979 except util.urlerr.urlerror as e:
4978 # read() method must be called, but only exists in Python 2
4980 # read() method must be called, but only exists in Python 2
4979 getattr(e, 'read', lambda: None)()
4981 getattr(e, 'read', lambda: None)()
4980 continue
4982 continue
4981
4983
4982 ct = res.headers.get('Content-Type')
4984 ct = res.headers.get('Content-Type')
4983 if ct == 'application/mercurial-cbor':
4985 if ct == 'application/mercurial-cbor':
4984 ui.write(
4986 ui.write(
4985 _(b'cbor> %s\n')
4987 _(b'cbor> %s\n')
4986 % stringutil.pprint(
4988 % stringutil.pprint(
4987 cborutil.decodeall(body), bprefix=True, indent=2
4989 cborutil.decodeall(body), bprefix=True, indent=2
4988 )
4990 )
4989 )
4991 )
4990
4992
4991 elif action == b'close':
4993 elif action == b'close':
4992 assert peer is not None
4994 assert peer is not None
4993 peer.close()
4995 peer.close()
4994 elif action == b'readavailable':
4996 elif action == b'readavailable':
4995 if not stdout or not stderr:
4997 if not stdout or not stderr:
4996 raise error.Abort(
4998 raise error.Abort(
4997 _(b'readavailable not available on this peer')
4999 _(b'readavailable not available on this peer')
4998 )
5000 )
4999
5001
5000 stdin.close()
5002 stdin.close()
5001 stdout.read()
5003 stdout.read()
5002 stderr.read()
5004 stderr.read()
5003
5005
5004 elif action == b'readline':
5006 elif action == b'readline':
5005 if not stdout:
5007 if not stdout:
5006 raise error.Abort(_(b'readline not available on this peer'))
5008 raise error.Abort(_(b'readline not available on this peer'))
5007 stdout.readline()
5009 stdout.readline()
5008 elif action == b'ereadline':
5010 elif action == b'ereadline':
5009 if not stderr:
5011 if not stderr:
5010 raise error.Abort(_(b'ereadline not available on this peer'))
5012 raise error.Abort(_(b'ereadline not available on this peer'))
5011 stderr.readline()
5013 stderr.readline()
5012 elif action.startswith(b'read '):
5014 elif action.startswith(b'read '):
5013 count = int(action.split(b' ', 1)[1])
5015 count = int(action.split(b' ', 1)[1])
5014 if not stdout:
5016 if not stdout:
5015 raise error.Abort(_(b'read not available on this peer'))
5017 raise error.Abort(_(b'read not available on this peer'))
5016 stdout.read(count)
5018 stdout.read(count)
5017 elif action.startswith(b'eread '):
5019 elif action.startswith(b'eread '):
5018 count = int(action.split(b' ', 1)[1])
5020 count = int(action.split(b' ', 1)[1])
5019 if not stderr:
5021 if not stderr:
5020 raise error.Abort(_(b'eread not available on this peer'))
5022 raise error.Abort(_(b'eread not available on this peer'))
5021 stderr.read(count)
5023 stderr.read(count)
5022 else:
5024 else:
5023 raise error.Abort(_(b'unknown action: %s') % action)
5025 raise error.Abort(_(b'unknown action: %s') % action)
5024
5026
5025 if batchedcommands is not None:
5027 if batchedcommands is not None:
5026 raise error.Abort(_(b'unclosed "batchbegin" request'))
5028 raise error.Abort(_(b'unclosed "batchbegin" request'))
5027
5029
5028 if peer:
5030 if peer:
5029 peer.close()
5031 peer.close()
5030
5032
5031 if proc:
5033 if proc:
5032 proc.kill()
5034 proc.kill()
@@ -1,55 +1,139 b''
1 # revlogutils/debug.py - utility used for revlog debuging
1 # revlogutils/debug.py - utility used for revlog debuging
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from .. import (
9 from .. import (
10 node as nodemod,
10 node as nodemod,
11 )
11 )
12
12
13 from . import (
14 constants,
15 )
16
# Registry of all columns displayed by `hg debugindex`, in display order.
INDEX_ENTRY_DEBUG_COLUMN = []

# Sentinel width: the column is as wide as a (possibly shortened) node hash.
NODE_SIZE = object()


class _column_base:
    """Definition of one column of the debug-index output.

    name: the column header,
    value_func: the function called to get a value,
    size: the width of the column (NODE_SIZE means "node-hash wide").
    """

    def __init__(self, name, value_func, size=None):
        self.name = name
        self.value_func = value_func
        if size is NODE_SIZE:
            # Resolved lazily in get_size(), once the node width is known.
            self._size = size
        else:
            # Never narrower than the header itself; 8 is an arbitrary default.
            self._size = max(len(name), 8 if size is None else size)

    def get_size(self, node_size):
        """Return the rendered width, given the width of a node hash."""
        return node_size if self._size is NODE_SIZE else self._size


def debug_column(name, size=None):
    """decorator registering the decorated function as an index column

    name: the name of the column,
    size: the expected size of the column.
    """

    def register(func):
        column = _column_base(name=name, value_func=func, size=size)
        INDEX_ENTRY_DEBUG_COLUMN.append(column)
        return column

    return register
63
64
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """the revision number, rendered as decimal bytes"""
    return ('%d' % rev).encode('ascii')
68
69
@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """the changelog revision this index entry is linked to"""
    linkrev = entry[constants.ENTRY_LINK_REV]
    return b"%d" % linkrev
73
74
@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """the (possibly shortened) hex hash of this revision's node"""
    node = entry[constants.ENTRY_NODE_ID]
    return hexfn(node)
78
79
@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """the (possibly shortened) hex hash of the first parent's node"""
    p1_rev = entry[constants.ENTRY_PARENT_1]
    return hexfn(index[p1_rev][constants.ENTRY_NODE_ID])
85
86
@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """the (possibly shortened) hex hash of the second parent's node"""
    p2_rev = entry[constants.ENTRY_PARENT_2]
    return hexfn(index[p2_rev][constants.ENTRY_NODE_ID])
92
13
93
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog"""
    hexfn = nodemod.hex if full_node else nodemod.short

    # Width of a rendered node hash; sample the first revision, if any.
    idlen = 12
    for sample in revlog:
        idlen = len(hexfn(revlog.node(sample)))
        break

    out = formatter

    # Header line: each registered column's name, right-justified to width.
    header = b' '.join(
        col.name.rjust(col.get_size(idlen))
        for col in INDEX_ENTRY_DEBUG_COLUMN
    )
    out.plain(header + b'\n')

    index = revlog.index

    for rev in revlog:
        out.startitem()
        entry = index[rev]
        for pos, col in enumerate(INDEX_ENTRY_DEBUG_COLUMN):
            if pos:
                # single-space separator between columns
                out.plain(b' ')
            width = col.get_size(idlen)
            value = col.value_func(index, rev, entry, hexfn)
            # right-justify the value to the column width
            out.write(col.name, b"%%%ds" % width, value)
        out.plain(b'\n')

    out.end()
General Comments 0
You need to be logged in to leave comments. Login now